1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "tree-gimple.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
129 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
130 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
131 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
133 static rtx expand_builtin_bzero (tree);
134 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_alloca (tree, rtx);
140 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
141 static rtx expand_builtin_frame_address (tree, tree);
142 static rtx expand_builtin_fputs (tree, rtx, bool);
143 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
145 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
146 static tree stabilize_va_list (tree, int);
147 static rtx expand_builtin_expect (tree, rtx);
148 static tree fold_builtin_constant_p (tree);
149 static tree fold_builtin_expect (tree);
150 static tree fold_builtin_classify_type (tree);
151 static tree fold_builtin_strlen (tree);
152 static tree fold_builtin_inf (tree, int);
153 static tree fold_builtin_nan (tree, tree, int);
154 static tree rewrite_call_expr (tree, int, tree, int, ...);
155 static bool validate_arg (const_tree, enum tree_code code);
156 static bool integer_valued_real_p (tree);
157 static tree fold_trunc_transparent_mathfn (tree, tree);
158 static bool readonly_data_expr (tree);
159 static rtx expand_builtin_fabs (tree, rtx, rtx);
160 static rtx expand_builtin_signbit (tree, rtx);
161 static tree fold_builtin_sqrt (tree, tree);
162 static tree fold_builtin_cbrt (tree, tree);
163 static tree fold_builtin_pow (tree, tree, tree, tree);
164 static tree fold_builtin_powi (tree, tree, tree, tree);
165 static tree fold_builtin_cos (tree, tree, tree);
166 static tree fold_builtin_cosh (tree, tree, tree);
167 static tree fold_builtin_tan (tree, tree);
168 static tree fold_builtin_trunc (tree, tree);
169 static tree fold_builtin_floor (tree, tree);
170 static tree fold_builtin_ceil (tree, tree);
171 static tree fold_builtin_round (tree, tree);
172 static tree fold_builtin_int_roundingfn (tree, tree);
173 static tree fold_builtin_bitop (tree, tree);
174 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
175 static tree fold_builtin_strchr (tree, tree, tree);
176 static tree fold_builtin_memchr (tree, tree, tree, tree);
177 static tree fold_builtin_memcmp (tree, tree, tree);
178 static tree fold_builtin_strcmp (tree, tree);
179 static tree fold_builtin_strncmp (tree, tree, tree);
180 static tree fold_builtin_signbit (tree, tree);
181 static tree fold_builtin_copysign (tree, tree, tree, tree);
182 static tree fold_builtin_isascii (tree);
183 static tree fold_builtin_toascii (tree);
184 static tree fold_builtin_isdigit (tree);
185 static tree fold_builtin_fabs (tree, tree);
186 static tree fold_builtin_abs (tree, tree);
187 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
189 static tree fold_builtin_n (tree, tree *, int, bool);
190 static tree fold_builtin_0 (tree, bool);
191 static tree fold_builtin_1 (tree, tree, bool);
192 static tree fold_builtin_2 (tree, tree, tree, bool);
193 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
194 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
195 static tree fold_builtin_varargs (tree, tree, bool);
197 static tree fold_builtin_strpbrk (tree, tree, tree);
198 static tree fold_builtin_strstr (tree, tree, tree);
199 static tree fold_builtin_strrchr (tree, tree, tree);
200 static tree fold_builtin_strcat (tree, tree);
201 static tree fold_builtin_strncat (tree, tree, tree);
202 static tree fold_builtin_strspn (tree, tree);
203 static tree fold_builtin_strcspn (tree, tree);
204 static tree fold_builtin_sprintf (tree, tree, tree, int);
206 static rtx expand_builtin_object_size (tree);
207 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
208 enum built_in_function);
209 static void maybe_emit_chk_warning (tree, enum built_in_function);
210 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
211 static tree fold_builtin_object_size (tree, tree);
212 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
213 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
214 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
215 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
216 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
217 enum built_in_function);
218 static bool init_target_chars (void);
220 static unsigned HOST_WIDE_INT target_newline;
221 static unsigned HOST_WIDE_INT target_percent;
222 static unsigned HOST_WIDE_INT target_c;
223 static unsigned HOST_WIDE_INT target_s;
224 static char target_percent_c[3];
225 static char target_percent_s[3];
226 static char target_percent_s_newline[4];
227 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
228 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
229 static tree do_mpfr_arg2 (tree, tree, tree,
230 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
231 static tree do_mpfr_arg3 (tree, tree, tree, tree,
232 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
233 static tree do_mpfr_sincos (tree, tree, tree);
234 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
235 static tree do_mpfr_bessel_n (tree, tree, tree,
236 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
237 const REAL_VALUE_TYPE *, bool);
238 static tree do_mpfr_remquo (tree, tree, tree);
239 static tree do_mpfr_lgamma_r (tree, tree, tree);
242 /* Return true if NODE should be considered for inline expansion regardless
243 of the optimization level. This means whenever a function is invoked with
244 its "internal" name, which normally contains the prefix "__builtin". */
246 static bool called_as_built_in (tree node)
248 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
249 if (strncmp (name, "__builtin_", 10) == 0)
251 if (strncmp (name, "__sync_", 7) == 0)
256 /* Return the alignment in bits of EXP, a pointer valued expression.
257 But don't return more than MAX_ALIGN no matter what.
258 The alignment returned is, by default, the alignment of the thing that
259 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
261 Otherwise, look at the expression to see if we can do better, i.e., if the
262 expression is actually pointing at an object whose alignment is tighter. */
265 get_pointer_alignment (tree exp, unsigned int max_align)
267 unsigned int align, inner;
269 /* We rely on TER to compute accurate alignment information. */
270 if (!(optimize && flag_tree_ter))
273 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
276 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
277 align = MIN (align, max_align);
281 switch (TREE_CODE (exp))
285 case NON_LVALUE_EXPR:
286 exp = TREE_OPERAND (exp, 0);
287 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
290 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
291 align = MIN (inner, max_align);
294 case POINTER_PLUS_EXPR:
295 /* If sum of pointer + int, restrict our maximum alignment to that
296 imposed by the integer. If not, we can't do any better than
298 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
301 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
302 & (max_align / BITS_PER_UNIT - 1))
306 exp = TREE_OPERAND (exp, 0);
310 /* See what we are pointing at and look at its alignment. */
311 exp = TREE_OPERAND (exp, 0);
313 if (handled_component_p (exp))
315 HOST_WIDE_INT bitsize, bitpos;
317 enum machine_mode mode;
318 int unsignedp, volatilep;
320 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
321 &mode, &unsignedp, &volatilep, true);
323 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
324 if (offset && TREE_CODE (offset) == PLUS_EXPR
325 && host_integerp (TREE_OPERAND (offset, 1), 1))
327 /* Any overflow in calculating offset_bits won't change
330 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
334 inner = MIN (inner, (offset_bits & -offset_bits));
335 offset = TREE_OPERAND (offset, 0);
337 if (offset && TREE_CODE (offset) == MULT_EXPR
338 && host_integerp (TREE_OPERAND (offset, 1), 1))
340 /* Any overflow in calculating offset_factor won't change
342 unsigned offset_factor
343 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
347 inner = MIN (inner, (offset_factor & -offset_factor));
350 inner = MIN (inner, BITS_PER_UNIT);
353 align = MIN (inner, DECL_ALIGN (exp));
354 #ifdef CONSTANT_ALIGNMENT
355 else if (CONSTANT_CLASS_P (exp))
356 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
358 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
359 || TREE_CODE (exp) == INDIRECT_REF)
360 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
362 align = MIN (align, inner);
363 return MIN (align, max_align);
371 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
372 way, because it could contain a zero byte in the middle.
373 TREE_STRING_LENGTH is the size of the character array, not the string.
375 ONLY_VALUE should be nonzero if the result is not going to be emitted
376 into the instruction stream and zero if it is going to be expanded.
377 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
378 is returned, otherwise NULL, since
379 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
380 evaluate the side-effects.
382 The value returned is of type `ssizetype'.
384 Unfortunately, string_constant can't access the values of const char
385 arrays with initializers, so neither can we do so here. */
388 c_strlen (tree src, int only_value)
391 HOST_WIDE_INT offset;
396 if (TREE_CODE (src) == COND_EXPR
397 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
401 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
402 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
403 if (tree_int_cst_equal (len1, len2))
407 if (TREE_CODE (src) == COMPOUND_EXPR
408 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
409 return c_strlen (TREE_OPERAND (src, 1), only_value);
411 src = string_constant (src, &offset_node);
415 max = TREE_STRING_LENGTH (src) - 1;
416 ptr = TREE_STRING_POINTER (src);
418 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
420 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
421 compute the offset to the following null if we don't know where to
422 start searching for it. */
425 for (i = 0; i < max; i++)
429 /* We don't know the starting offset, but we do know that the string
430 has no internal zero bytes. We can assume that the offset falls
431 within the bounds of the string; otherwise, the programmer deserves
432 what he gets. Subtract the offset from the length of the string,
433 and return that. This would perhaps not be valid if we were dealing
434 with named arrays in addition to literal string constants. */
436 return size_diffop (size_int (max), offset_node);
439 /* We have a known offset into the string. Start searching there for
440 a null character if we can represent it as a single HOST_WIDE_INT. */
441 if (offset_node == 0)
443 else if (! host_integerp (offset_node, 0))
446 offset = tree_low_cst (offset_node, 0);
448 /* If the offset is known to be out of bounds, warn, and call strlen at
450 if (offset < 0 || offset > max)
452 warning (0, "offset outside bounds of constant string");
456 /* Use strlen to search for the first zero byte. Since any strings
457 constructed with build_string will have nulls appended, we win even
458 if we get handed something like (char[4])"abcd".
460 Since OFFSET is our starting index into the string, no further
461 calculation is needed. */
462 return ssize_int (strlen (ptr + offset));
465 /* Return a char pointer for a C string if it is a string constant
466 or sum of string constant and integer constant. */
473 src = string_constant (src, &offset_node);
477 if (offset_node == 0)
478 return TREE_STRING_POINTER (src);
479 else if (!host_integerp (offset_node, 1)
480 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
483 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
486 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
487 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
490 c_readstr (const char *str, enum machine_mode mode)
496 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
501 for (i = 0; i < GET_MODE_SIZE (mode); i++)
504 if (WORDS_BIG_ENDIAN)
505 j = GET_MODE_SIZE (mode) - i - 1;
506 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
507 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
508 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
510 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
513 ch = (unsigned char) str[i];
514 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
516 return immed_double_const (c[0], c[1], mode);
519 /* Cast a target constant CST to target CHAR and if that value fits into
520 host char type, return zero and put that value into variable pointed to by
524 target_char_cast (tree cst, char *p)
526 unsigned HOST_WIDE_INT val, hostval;
528 if (!host_integerp (cst, 1)
529 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
532 val = tree_low_cst (cst, 1);
533 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
534 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
537 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
538 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
547 /* Similar to save_expr, but assumes that arbitrary code is not executed
548 in between the multiple evaluations. In particular, we assume that a
549 non-addressable local variable will not be modified. */
552 builtin_save_expr (tree exp)
554 if (TREE_ADDRESSABLE (exp) == 0
555 && (TREE_CODE (exp) == PARM_DECL
556 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
559 return save_expr (exp);
562 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
563 times to get the address of either a higher stack frame, or a return
564 address located within it (depending on FNDECL_CODE). */
567 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
571 #ifdef INITIAL_FRAME_ADDRESS_RTX
572 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
576 /* For a zero count with __builtin_return_address, we don't care what
577 frame address we return, because target-specific definitions will
578 override us. Therefore frame pointer elimination is OK, and using
579 the soft frame pointer is OK.
581 For a nonzero count, or a zero count with __builtin_frame_address,
582 we require a stable offset from the current frame pointer to the
583 previous one, so we must use the hard frame pointer, and
584 we must disable frame pointer elimination. */
585 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
586 tem = frame_pointer_rtx;
589 tem = hard_frame_pointer_rtx;
591 /* Tell reload not to eliminate the frame pointer. */
592 current_function_accesses_prior_frames = 1;
596 /* Some machines need special handling before we can access
597 arbitrary frames. For example, on the SPARC, we must first flush
598 all register windows to the stack. */
599 #ifdef SETUP_FRAME_ADDRESSES
601 SETUP_FRAME_ADDRESSES ();
604 /* On the SPARC, the return address is not in the frame, it is in a
605 register. There is no way to access it off of the current frame
606 pointer, but it can be accessed off the previous frame pointer by
607 reading the value from the register window save area. */
608 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
609 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
613 /* Scan back COUNT frames to the specified frame. */
614 for (i = 0; i < count; i++)
616 /* Assume the dynamic chain pointer is in the word that the
617 frame address points to, unless otherwise specified. */
618 #ifdef DYNAMIC_CHAIN_ADDRESS
619 tem = DYNAMIC_CHAIN_ADDRESS (tem);
621 tem = memory_address (Pmode, tem);
622 tem = gen_frame_mem (Pmode, tem);
623 tem = copy_to_reg (tem);
626 /* For __builtin_frame_address, return what we've got. But, on
627 the SPARC for example, we may have to add a bias. */
628 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
629 #ifdef FRAME_ADDR_RTX
630 return FRAME_ADDR_RTX (tem);
635 /* For __builtin_return_address, get the return address from that frame. */
636 #ifdef RETURN_ADDR_RTX
637 tem = RETURN_ADDR_RTX (count, tem);
639 tem = memory_address (Pmode,
640 plus_constant (tem, GET_MODE_SIZE (Pmode)));
641 tem = gen_frame_mem (Pmode, tem);
646 /* Alias set used for setjmp buffer. */
647 static HOST_WIDE_INT setjmp_alias_set = -1;
649 /* Construct the leading half of a __builtin_setjmp call. Control will
650 return to RECEIVER_LABEL. This is also called directly by the SJLJ
651 exception handling code. */
654 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
656 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
660 if (setjmp_alias_set == -1)
661 setjmp_alias_set = new_alias_set ();
663 buf_addr = convert_memory_address (Pmode, buf_addr);
665 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
667 /* We store the frame pointer and the address of receiver_label in
668 the buffer and use the rest of it for the stack save area, which
669 is machine-dependent. */
671 mem = gen_rtx_MEM (Pmode, buf_addr);
672 set_mem_alias_set (mem, setjmp_alias_set);
673 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
675 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
676 set_mem_alias_set (mem, setjmp_alias_set);
678 emit_move_insn (validize_mem (mem),
679 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
681 stack_save = gen_rtx_MEM (sa_mode,
682 plus_constant (buf_addr,
683 2 * GET_MODE_SIZE (Pmode)));
684 set_mem_alias_set (stack_save, setjmp_alias_set);
685 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
687 /* If there is further processing to do, do it. */
688 #ifdef HAVE_builtin_setjmp_setup
689 if (HAVE_builtin_setjmp_setup)
690 emit_insn (gen_builtin_setjmp_setup (buf_addr));
693 /* Tell optimize_save_area_alloca that extra work is going to
694 need to go on during alloca. */
695 current_function_calls_setjmp = 1;
697 /* We have a nonlocal label. */
698 current_function_has_nonlocal_label = 1;
701 /* Construct the trailing part of a __builtin_setjmp call. This is
702 also called directly by the SJLJ exception handling code. */
705 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
707 /* Clobber the FP when we get here, so we have to make sure it's
708 marked as used by this function. */
709 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
711 /* Mark the static chain as clobbered here so life information
712 doesn't get messed up for it. */
713 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
715 /* Now put in the code to restore the frame pointer, and argument
716 pointer, if needed. */
717 #ifdef HAVE_nonlocal_goto
718 if (! HAVE_nonlocal_goto)
721 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
722 /* This might change the hard frame pointer in ways that aren't
723 apparent to early optimization passes, so force a clobber. */
724 emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
727 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
728 if (fixed_regs[ARG_POINTER_REGNUM])
730 #ifdef ELIMINABLE_REGS
732 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
734 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
735 if (elim_regs[i].from == ARG_POINTER_REGNUM
736 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
739 if (i == ARRAY_SIZE (elim_regs))
742 /* Now restore our arg pointer from the address at which it
743 was saved in our stack frame. */
744 emit_move_insn (virtual_incoming_args_rtx,
745 copy_to_reg (get_arg_pointer_save_area (cfun)));
750 #ifdef HAVE_builtin_setjmp_receiver
751 if (HAVE_builtin_setjmp_receiver)
752 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
755 #ifdef HAVE_nonlocal_goto_receiver
756 if (HAVE_nonlocal_goto_receiver)
757 emit_insn (gen_nonlocal_goto_receiver ());
762 /* We must not allow the code we just generated to be reordered by
763 scheduling. Specifically, the update of the frame pointer must
764 happen immediately, not later. */
765 emit_insn (gen_blockage ());
768 /* __builtin_longjmp is passed a pointer to an array of five words (not
769 all will be used on all machines). It operates similarly to the C
770 library function of the same name, but is more efficient. Much of
771 the code below is copied from the handling of non-local gotos. */
774 expand_builtin_longjmp (rtx buf_addr, rtx value)
776 rtx fp, lab, stack, insn, last;
777 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
779 if (setjmp_alias_set == -1)
780 setjmp_alias_set = new_alias_set ();
782 buf_addr = convert_memory_address (Pmode, buf_addr);
784 buf_addr = force_reg (Pmode, buf_addr);
786 /* We used to store value in static_chain_rtx, but that fails if pointers
787 are smaller than integers. We instead require that the user must pass
788 a second argument of 1, because that is what builtin_setjmp will
789 return. This also makes EH slightly more efficient, since we are no
790 longer copying around a value that we don't care about. */
791 gcc_assert (value == const1_rtx);
793 last = get_last_insn ();
794 #ifdef HAVE_builtin_longjmp
795 if (HAVE_builtin_longjmp)
796 emit_insn (gen_builtin_longjmp (buf_addr));
800 fp = gen_rtx_MEM (Pmode, buf_addr);
801 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
802 GET_MODE_SIZE (Pmode)));
804 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
805 2 * GET_MODE_SIZE (Pmode)));
806 set_mem_alias_set (fp, setjmp_alias_set);
807 set_mem_alias_set (lab, setjmp_alias_set);
808 set_mem_alias_set (stack, setjmp_alias_set);
810 /* Pick up FP, label, and SP from the block and jump. This code is
811 from expand_goto in stmt.c; see there for detailed comments. */
812 #ifdef HAVE_nonlocal_goto
813 if (HAVE_nonlocal_goto)
814 /* We have to pass a value to the nonlocal_goto pattern that will
815 get copied into the static_chain pointer, but it does not matter
816 what that value is, because builtin_setjmp does not use it. */
817 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
821 lab = copy_to_reg (lab);
823 emit_insn (gen_rtx_CLOBBER (VOIDmode,
824 gen_rtx_MEM (BLKmode,
825 gen_rtx_SCRATCH (VOIDmode))));
826 emit_insn (gen_rtx_CLOBBER (VOIDmode,
827 gen_rtx_MEM (BLKmode,
828 hard_frame_pointer_rtx)));
830 emit_move_insn (hard_frame_pointer_rtx, fp);
831 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
833 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
834 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
835 emit_indirect_jump (lab);
839 /* Search backwards and mark the jump insn as a non-local goto.
840 Note that this precludes the use of __builtin_longjmp to a
841 __builtin_setjmp target in the same function. However, we've
842 already cautioned the user that these functions are for
843 internal exception handling use only. */
844 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
846 gcc_assert (insn != last);
850 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
854 else if (CALL_P (insn))
859 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
860 and the address of the save area. */
863 expand_builtin_nonlocal_goto (tree exp)
865 tree t_label, t_save_area;
866 rtx r_label, r_save_area, r_fp, r_sp, insn;
868 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
871 t_label = CALL_EXPR_ARG (exp, 0);
872 t_save_area = CALL_EXPR_ARG (exp, 1);
874 r_label = expand_normal (t_label);
875 r_label = convert_memory_address (Pmode, r_label);
876 r_save_area = expand_normal (t_save_area);
877 r_save_area = convert_memory_address (Pmode, r_save_area);
878 r_fp = gen_rtx_MEM (Pmode, r_save_area);
879 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
880 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
882 current_function_has_nonlocal_goto = 1;
884 #ifdef HAVE_nonlocal_goto
885 /* ??? We no longer need to pass the static chain value, afaik. */
886 if (HAVE_nonlocal_goto)
887 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
891 r_label = copy_to_reg (r_label);
893 emit_insn (gen_rtx_CLOBBER (VOIDmode,
894 gen_rtx_MEM (BLKmode,
895 gen_rtx_SCRATCH (VOIDmode))));
897 emit_insn (gen_rtx_CLOBBER (VOIDmode,
898 gen_rtx_MEM (BLKmode,
899 hard_frame_pointer_rtx)));
901 /* Restore frame pointer for containing function.
902 This sets the actual hard register used for the frame pointer
903 to the location of the function's incoming static chain info.
904 The non-local goto handler will then adjust it to contain the
905 proper value and reload the argument pointer, if needed. */
906 emit_move_insn (hard_frame_pointer_rtx, r_fp);
907 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
909 /* USE of hard_frame_pointer_rtx added for consistency;
910 not clear if really needed. */
911 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
912 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
913 emit_indirect_jump (r_label);
916 /* Search backwards to the jump insn and mark it as a
918 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
922 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
923 const0_rtx, REG_NOTES (insn));
926 else if (CALL_P (insn))
933 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
934 (not all will be used on all machines) that was passed to __builtin_setjmp.
935 It updates the stack pointer in that block to correspond to the current
939 expand_builtin_update_setjmp_buf (rtx buf_addr)
941 enum machine_mode sa_mode = Pmode;
945 #ifdef HAVE_save_stack_nonlocal
946 if (HAVE_save_stack_nonlocal)
947 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
949 #ifdef STACK_SAVEAREA_MODE
950 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
954 = gen_rtx_MEM (sa_mode,
957 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
961 emit_insn (gen_setjmp ());
964 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
967 /* Expand a call to __builtin_prefetch. For a target that does not support
968 data prefetch, evaluate the memory address argument in case it has side
972 expand_builtin_prefetch (tree exp)
974 tree arg0, arg1, arg2;
978 if (!validate_arglist (exp, POINTER_TYPE, 0))
981 arg0 = CALL_EXPR_ARG (exp, 0);
983 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
984 zero (read) and argument 2 (locality) defaults to 3 (high degree of
986 nargs = call_expr_nargs (exp);
988 arg1 = CALL_EXPR_ARG (exp, 1);
990 arg1 = integer_zero_node;
992 arg2 = CALL_EXPR_ARG (exp, 2);
994 arg2 = build_int_cst (NULL_TREE, 3);
996 /* Argument 0 is an address. */
997 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
999 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1000 if (TREE_CODE (arg1) != INTEGER_CST)
1002 error ("second argument to %<__builtin_prefetch%> must be a constant");
1003 arg1 = integer_zero_node;
1005 op1 = expand_normal (arg1);
1006 /* Argument 1 must be either zero or one. */
1007 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1009 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1014 /* Argument 2 (locality) must be a compile-time constant int. */
1015 if (TREE_CODE (arg2) != INTEGER_CST)
1017 error ("third argument to %<__builtin_prefetch%> must be a constant");
1018 arg2 = integer_zero_node;
1020 op2 = expand_normal (arg2);
1021 /* Argument 2 must be 0, 1, 2, or 3. */
1022 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1024 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1028 #ifdef HAVE_prefetch
1031 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1033 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1034 || (GET_MODE (op0) != Pmode))
1036 op0 = convert_memory_address (Pmode, op0);
1037 op0 = force_reg (Pmode, op0);
1039 emit_insn (gen_prefetch (op0, op1, op2));
1043 /* Don't do anything with direct references to volatile memory, but
1044 generate code to handle other side effects. */
1045 if (!MEM_P (op0) && side_effects_p (op0))
1049 /* Get a MEM rtx for expression EXP which is the address of an operand
1050 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1051 the maximum length of the block of memory that might be accessed or
1055 get_memory_rtx (tree exp, tree len)
1057 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1058 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1060 /* Get an expression we can use to find the attributes to assign to MEM.
1061 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1062 we can. First remove any nops. */
1063 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
1064 || TREE_CODE (exp) == NON_LVALUE_EXPR)
1065 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1066 exp = TREE_OPERAND (exp, 0);
1068 if (TREE_CODE (exp) == ADDR_EXPR)
1069 exp = TREE_OPERAND (exp, 0);
1070 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1071 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1075 /* Honor attributes derived from exp, except for the alias set
1076 (as builtin stringops may alias with anything) and the size
1077 (as stringops may access multiple array elements). */
1080 set_mem_attributes (mem, exp, 0);
1082 /* Allow the string and memory builtins to overflow from one
1083 field into another, see http://gcc.gnu.org/PR23561.
1084 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1085 memory accessed by the string or memory builtin will fit
1086 within the field. */
1087 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1089 tree mem_expr = MEM_EXPR (mem);
1090 HOST_WIDE_INT offset = -1, length = -1;
1093 while (TREE_CODE (inner) == ARRAY_REF
1094 || TREE_CODE (inner) == NOP_EXPR
1095 || TREE_CODE (inner) == CONVERT_EXPR
1096 || TREE_CODE (inner) == NON_LVALUE_EXPR
1097 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1098 || TREE_CODE (inner) == SAVE_EXPR)
1099 inner = TREE_OPERAND (inner, 0);
1101 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1103 if (MEM_OFFSET (mem)
1104 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1105 offset = INTVAL (MEM_OFFSET (mem));
1107 if (offset >= 0 && len && host_integerp (len, 0))
1108 length = tree_low_cst (len, 0);
1110 while (TREE_CODE (inner) == COMPONENT_REF)
1112 tree field = TREE_OPERAND (inner, 1);
1113 gcc_assert (! DECL_BIT_FIELD (field));
1114 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1115 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1118 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1119 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1122 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1123 /* If we can prove the memory starting at XEXP (mem, 0)
1124 and ending at XEXP (mem, 0) + LENGTH will fit into
1125 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1128 && offset + length <= size)
1133 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1134 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1135 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1143 mem_expr = TREE_OPERAND (mem_expr, 0);
1144 inner = TREE_OPERAND (inner, 0);
1147 if (mem_expr == NULL)
1149 if (mem_expr != MEM_EXPR (mem))
1151 set_mem_expr (mem, mem_expr);
1152 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1155 set_mem_alias_set (mem, 0);
1156 set_mem_size (mem, NULL_RTX);
1162 /* Built-in functions to perform an untyped call and return. */
1164 /* For each register that may be used for calling a function, this
1165 gives a mode used to copy the register's value. VOIDmode indicates
1166 the register is not used for calling a function. If the machine
1167 has register windows, this gives only the outbound registers.
1168 INCOMING_REGNO gives the corresponding inbound register. */
1169 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1171 /* For each register that may be used for returning values, this gives
1172 a mode used to copy the register's value. VOIDmode indicates the
1173 register is not used for returning values. If the machine has
1174 register windows, this gives only the outbound registers.
1175 INCOMING_REGNO gives the corresponding inbound register. */
1176 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1178 /* For each register that may be used for calling a function, this
1179 gives the offset of that register into the block returned by
1180 __builtin_apply_args. 0 indicates that the register is not
1181 used for calling a function. */
1182 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1184 /* Return the size required for the block returned by __builtin_apply_args,
1185 and initialize apply_args_mode. */
1188 apply_args_size (void)
1190 static int size = -1;
1193 enum machine_mode mode;
1195 /* The values computed by this function never change. */
1198 /* The first value is the incoming arg-pointer. */
1199 size = GET_MODE_SIZE (Pmode);
1201 /* The second value is the structure value address unless this is
1202 passed as an "invisible" first argument. */
1203 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1204 size += GET_MODE_SIZE (Pmode);
1206 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1207 if (FUNCTION_ARG_REGNO_P (regno))
1209 mode = reg_raw_mode[regno];
1211 gcc_assert (mode != VOIDmode);
1213 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1214 if (size % align != 0)
1215 size = CEIL (size, align) * align;
1216 apply_args_reg_offset[regno] = size;
1217 size += GET_MODE_SIZE (mode);
1218 apply_args_mode[regno] = mode;
1222 apply_args_mode[regno] = VOIDmode;
1223 apply_args_reg_offset[regno] = 0;
1229 /* Return the size required for the block returned by __builtin_apply,
1230 and initialize apply_result_mode. */
1233 apply_result_size (void)
1235 static int size = -1;
1237 enum machine_mode mode;
1239 /* The values computed by this function never change. */
1244 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1245 if (FUNCTION_VALUE_REGNO_P (regno))
1247 mode = reg_raw_mode[regno];
1249 gcc_assert (mode != VOIDmode);
1251 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1252 if (size % align != 0)
1253 size = CEIL (size, align) * align;
1254 size += GET_MODE_SIZE (mode);
1255 apply_result_mode[regno] = mode;
1258 apply_result_mode[regno] = VOIDmode;
1260 /* Allow targets that use untyped_call and untyped_return to override
1261 the size so that machine-specific information can be stored here. */
1262 #ifdef APPLY_RESULT_SIZE
1263 size = APPLY_RESULT_SIZE;
1269 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1270 /* Create a vector describing the result block RESULT. If SAVEP is true,
1271 the result block is used to save the values; otherwise it is used to
1272 restore the values. */
1275 result_vector (int savep, rtx result)
1277 int regno, size, align, nelts;
1278 enum machine_mode mode;
1280 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1283 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1284 if ((mode = apply_result_mode[regno]) != VOIDmode)
1286 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1287 if (size % align != 0)
1288 size = CEIL (size, align) * align;
1289 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1290 mem = adjust_address (result, mode, size);
1291 savevec[nelts++] = (savep
1292 ? gen_rtx_SET (VOIDmode, mem, reg)
1293 : gen_rtx_SET (VOIDmode, reg, mem));
1294 size += GET_MODE_SIZE (mode);
1296 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1298 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1300 /* Save the state required to perform an untyped call with the same
1301 arguments as were passed to the current function. */
1304 expand_builtin_apply_args_1 (void)
1307 int size, align, regno;
1308 enum machine_mode mode;
1309 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1311 /* Create a block where the arg-pointer, structure value address,
1312 and argument registers can be saved. */
1313 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1315 /* Walk past the arg-pointer and structure value address. */
1316 size = GET_MODE_SIZE (Pmode);
1317 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1318 size += GET_MODE_SIZE (Pmode);
1320 /* Save each register used in calling a function to the block. */
1321 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1322 if ((mode = apply_args_mode[regno]) != VOIDmode)
1324 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1325 if (size % align != 0)
1326 size = CEIL (size, align) * align;
1328 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1330 emit_move_insn (adjust_address (registers, mode, size), tem);
1331 size += GET_MODE_SIZE (mode);
1334 /* Save the arg pointer to the block. */
1335 tem = copy_to_reg (virtual_incoming_args_rtx);
1336 #ifdef STACK_GROWS_DOWNWARD
1337 /* We need the pointer as the caller actually passed them to us, not
1338 as we might have pretended they were passed. Make sure it's a valid
1339 operand, as emit_move_insn isn't expected to handle a PLUS. */
1341 = force_operand (plus_constant (tem, current_function_pretend_args_size),
1344 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1346 size = GET_MODE_SIZE (Pmode);
1348 /* Save the structure value address unless this is passed as an
1349 "invisible" first argument. */
1350 if (struct_incoming_value)
1352 emit_move_insn (adjust_address (registers, Pmode, size),
1353 copy_to_reg (struct_incoming_value));
1354 size += GET_MODE_SIZE (Pmode);
1357 /* Return the address of the block. */
1358 return copy_addr_to_reg (XEXP (registers, 0));
1361 /* __builtin_apply_args returns block of memory allocated on
1362 the stack into which is stored the arg pointer, structure
1363 value address, static chain, and all the registers that might
1364 possibly be used in performing a function call. The code is
1365 moved to the start of the function so the incoming values are
1369 expand_builtin_apply_args (void)
1371 /* Don't do __builtin_apply_args more than once in a function.
1372 Save the result of the first call and reuse it. */
1373 if (apply_args_value != 0)
1374 return apply_args_value;
1376 /* When this function is called, it means that registers must be
1377 saved on entry to this function. So we migrate the
1378 call to the first insn of this function. */
1383 temp = expand_builtin_apply_args_1 ();
1387 apply_args_value = temp;
1389 /* Put the insns after the NOTE that starts the function.
1390 If this is inside a start_sequence, make the outer-level insn
1391 chain current, so the code is placed at the start of the
1393 push_topmost_sequence ();
1394 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1395 pop_topmost_sequence ();
1400 /* Perform an untyped call and save the state required to perform an
1401 untyped return of whatever value was returned by the given function. */
1404 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1406 int size, align, regno;
1407 enum machine_mode mode;
1408 rtx incoming_args, result, reg, dest, src, call_insn;
1409 rtx old_stack_level = 0;
1410 rtx call_fusage = 0;
1411 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1413 arguments = convert_memory_address (Pmode, arguments);
1415 /* Create a block where the return registers can be saved. */
1416 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1418 /* Fetch the arg pointer from the ARGUMENTS block. */
1419 incoming_args = gen_reg_rtx (Pmode);
1420 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1421 #ifndef STACK_GROWS_DOWNWARD
1422 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1423 incoming_args, 0, OPTAB_LIB_WIDEN);
1426 /* Push a new argument block and copy the arguments. Do not allow
1427 the (potential) memcpy call below to interfere with our stack
1429 do_pending_stack_adjust ();
1432 /* Save the stack with nonlocal if available. */
1433 #ifdef HAVE_save_stack_nonlocal
1434 if (HAVE_save_stack_nonlocal)
1435 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1438 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1440 /* Allocate a block of memory onto the stack and copy the memory
1441 arguments to the outgoing arguments address. */
1442 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1443 dest = virtual_outgoing_args_rtx;
1444 #ifndef STACK_GROWS_DOWNWARD
1445 if (GET_CODE (argsize) == CONST_INT)
1446 dest = plus_constant (dest, -INTVAL (argsize));
1448 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1450 dest = gen_rtx_MEM (BLKmode, dest);
1451 set_mem_align (dest, PARM_BOUNDARY);
1452 src = gen_rtx_MEM (BLKmode, incoming_args);
1453 set_mem_align (src, PARM_BOUNDARY);
1454 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1456 /* Refer to the argument block. */
1458 arguments = gen_rtx_MEM (BLKmode, arguments);
1459 set_mem_align (arguments, PARM_BOUNDARY);
1461 /* Walk past the arg-pointer and structure value address. */
1462 size = GET_MODE_SIZE (Pmode);
1464 size += GET_MODE_SIZE (Pmode);
1466 /* Restore each of the registers previously saved. Make USE insns
1467 for each of these registers for use in making the call. */
1468 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1469 if ((mode = apply_args_mode[regno]) != VOIDmode)
1471 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1472 if (size % align != 0)
1473 size = CEIL (size, align) * align;
1474 reg = gen_rtx_REG (mode, regno);
1475 emit_move_insn (reg, adjust_address (arguments, mode, size));
1476 use_reg (&call_fusage, reg);
1477 size += GET_MODE_SIZE (mode);
1480 /* Restore the structure value address unless this is passed as an
1481 "invisible" first argument. */
1482 size = GET_MODE_SIZE (Pmode);
1485 rtx value = gen_reg_rtx (Pmode);
1486 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1487 emit_move_insn (struct_value, value);
1488 if (REG_P (struct_value))
1489 use_reg (&call_fusage, struct_value);
1490 size += GET_MODE_SIZE (Pmode);
1493 /* All arguments and registers used for the call are set up by now! */
1494 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1496 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1497 and we don't want to load it into a register as an optimization,
1498 because prepare_call_address already did it if it should be done. */
1499 if (GET_CODE (function) != SYMBOL_REF)
1500 function = memory_address (FUNCTION_MODE, function);
1502 /* Generate the actual call instruction and save the return value. */
1503 #ifdef HAVE_untyped_call
1504 if (HAVE_untyped_call)
1505 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1506 result, result_vector (1, result)));
1509 #ifdef HAVE_call_value
1510 if (HAVE_call_value)
1514 /* Locate the unique return register. It is not possible to
1515 express a call that sets more than one return register using
1516 call_value; use untyped_call for that. In fact, untyped_call
1517 only needs to save the return registers in the given block. */
1518 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1519 if ((mode = apply_result_mode[regno]) != VOIDmode)
1521 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1523 valreg = gen_rtx_REG (mode, regno);
1526 emit_call_insn (GEN_CALL_VALUE (valreg,
1527 gen_rtx_MEM (FUNCTION_MODE, function),
1528 const0_rtx, NULL_RTX, const0_rtx));
1530 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1536 /* Find the CALL insn we just emitted, and attach the register usage
1538 call_insn = last_call_insn ();
1539 add_function_usage_to (call_insn, call_fusage);
1541 /* Restore the stack. */
1542 #ifdef HAVE_save_stack_nonlocal
1543 if (HAVE_save_stack_nonlocal)
1544 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1547 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1551 /* Return the address of the result block. */
1552 result = copy_addr_to_reg (XEXP (result, 0));
1553 return convert_memory_address (ptr_mode, result);
1556 /* Perform an untyped return. */
1559 expand_builtin_return (rtx result)
1561 int size, align, regno;
1562 enum machine_mode mode;
1564 rtx call_fusage = 0;
1566 result = convert_memory_address (Pmode, result);
1568 apply_result_size ();
1569 result = gen_rtx_MEM (BLKmode, result);
1571 #ifdef HAVE_untyped_return
1572 if (HAVE_untyped_return)
1574 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1580 /* Restore the return value and note that each value is used. */
1582 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1583 if ((mode = apply_result_mode[regno]) != VOIDmode)
1585 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1586 if (size % align != 0)
1587 size = CEIL (size, align) * align;
1588 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1589 emit_move_insn (reg, adjust_address (result, mode, size));
1591 push_to_sequence (call_fusage);
1592 emit_insn (gen_rtx_USE (VOIDmode, reg));
1593 call_fusage = get_insns ();
1595 size += GET_MODE_SIZE (mode);
1598 /* Put the USE insns before the return. */
1599 emit_insn (call_fusage);
1601 /* Return whatever values was restored by jumping directly to the end
1603 expand_naked_return ();
1606 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1608 static enum type_class
1609 type_to_class (tree type)
1611 switch (TREE_CODE (type))
1613 case VOID_TYPE: return void_type_class;
1614 case INTEGER_TYPE: return integer_type_class;
1615 case ENUMERAL_TYPE: return enumeral_type_class;
1616 case BOOLEAN_TYPE: return boolean_type_class;
1617 case POINTER_TYPE: return pointer_type_class;
1618 case REFERENCE_TYPE: return reference_type_class;
1619 case OFFSET_TYPE: return offset_type_class;
1620 case REAL_TYPE: return real_type_class;
1621 case COMPLEX_TYPE: return complex_type_class;
1622 case FUNCTION_TYPE: return function_type_class;
1623 case METHOD_TYPE: return method_type_class;
1624 case RECORD_TYPE: return record_type_class;
1626 case QUAL_UNION_TYPE: return union_type_class;
1627 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1628 ? string_type_class : array_type_class);
1629 case LANG_TYPE: return lang_type_class;
1630 default: return no_type_class;
1634 /* Expand a call EXP to __builtin_classify_type. */
1637 expand_builtin_classify_type (tree exp)
1639 if (call_expr_nargs (exp))
1640 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1641 return GEN_INT (no_type_class);
1644 /* This helper macro, meant to be used in mathfn_built_in below,
1645 determines which among a set of three builtin math functions is
1646 appropriate for a given type mode. The `F' and `L' cases are
1647 automatically generated from the `double' case. */
1648 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1649 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1650 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1651 fcodel = BUILT_IN_MATHFN##L ; break;
1652 /* Similar to above, but appends _R after any F/L suffix. */
1653 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1654 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1655 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1656 fcodel = BUILT_IN_MATHFN##L_R ; break;
1658 /* Return mathematic function equivalent to FN but operating directly
1659 on TYPE, if available. If we can't do the conversion, return zero. */
1661 mathfn_built_in (tree type, enum built_in_function fn)
1663 enum built_in_function fcode, fcodef, fcodel;
1667 CASE_MATHFN (BUILT_IN_ACOS)
1668 CASE_MATHFN (BUILT_IN_ACOSH)
1669 CASE_MATHFN (BUILT_IN_ASIN)
1670 CASE_MATHFN (BUILT_IN_ASINH)
1671 CASE_MATHFN (BUILT_IN_ATAN)
1672 CASE_MATHFN (BUILT_IN_ATAN2)
1673 CASE_MATHFN (BUILT_IN_ATANH)
1674 CASE_MATHFN (BUILT_IN_CBRT)
1675 CASE_MATHFN (BUILT_IN_CEIL)
1676 CASE_MATHFN (BUILT_IN_CEXPI)
1677 CASE_MATHFN (BUILT_IN_COPYSIGN)
1678 CASE_MATHFN (BUILT_IN_COS)
1679 CASE_MATHFN (BUILT_IN_COSH)
1680 CASE_MATHFN (BUILT_IN_DREM)
1681 CASE_MATHFN (BUILT_IN_ERF)
1682 CASE_MATHFN (BUILT_IN_ERFC)
1683 CASE_MATHFN (BUILT_IN_EXP)
1684 CASE_MATHFN (BUILT_IN_EXP10)
1685 CASE_MATHFN (BUILT_IN_EXP2)
1686 CASE_MATHFN (BUILT_IN_EXPM1)
1687 CASE_MATHFN (BUILT_IN_FABS)
1688 CASE_MATHFN (BUILT_IN_FDIM)
1689 CASE_MATHFN (BUILT_IN_FLOOR)
1690 CASE_MATHFN (BUILT_IN_FMA)
1691 CASE_MATHFN (BUILT_IN_FMAX)
1692 CASE_MATHFN (BUILT_IN_FMIN)
1693 CASE_MATHFN (BUILT_IN_FMOD)
1694 CASE_MATHFN (BUILT_IN_FREXP)
1695 CASE_MATHFN (BUILT_IN_GAMMA)
1696 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1697 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1698 CASE_MATHFN (BUILT_IN_HYPOT)
1699 CASE_MATHFN (BUILT_IN_ILOGB)
1700 CASE_MATHFN (BUILT_IN_INF)
1701 CASE_MATHFN (BUILT_IN_ISINF)
1702 CASE_MATHFN (BUILT_IN_J0)
1703 CASE_MATHFN (BUILT_IN_J1)
1704 CASE_MATHFN (BUILT_IN_JN)
1705 CASE_MATHFN (BUILT_IN_LCEIL)
1706 CASE_MATHFN (BUILT_IN_LDEXP)
1707 CASE_MATHFN (BUILT_IN_LFLOOR)
1708 CASE_MATHFN (BUILT_IN_LGAMMA)
1709 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1710 CASE_MATHFN (BUILT_IN_LLCEIL)
1711 CASE_MATHFN (BUILT_IN_LLFLOOR)
1712 CASE_MATHFN (BUILT_IN_LLRINT)
1713 CASE_MATHFN (BUILT_IN_LLROUND)
1714 CASE_MATHFN (BUILT_IN_LOG)
1715 CASE_MATHFN (BUILT_IN_LOG10)
1716 CASE_MATHFN (BUILT_IN_LOG1P)
1717 CASE_MATHFN (BUILT_IN_LOG2)
1718 CASE_MATHFN (BUILT_IN_LOGB)
1719 CASE_MATHFN (BUILT_IN_LRINT)
1720 CASE_MATHFN (BUILT_IN_LROUND)
1721 CASE_MATHFN (BUILT_IN_MODF)
1722 CASE_MATHFN (BUILT_IN_NAN)
1723 CASE_MATHFN (BUILT_IN_NANS)
1724 CASE_MATHFN (BUILT_IN_NEARBYINT)
1725 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1726 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1727 CASE_MATHFN (BUILT_IN_POW)
1728 CASE_MATHFN (BUILT_IN_POWI)
1729 CASE_MATHFN (BUILT_IN_POW10)
1730 CASE_MATHFN (BUILT_IN_REMAINDER)
1731 CASE_MATHFN (BUILT_IN_REMQUO)
1732 CASE_MATHFN (BUILT_IN_RINT)
1733 CASE_MATHFN (BUILT_IN_ROUND)
1734 CASE_MATHFN (BUILT_IN_SCALB)
1735 CASE_MATHFN (BUILT_IN_SCALBLN)
1736 CASE_MATHFN (BUILT_IN_SCALBN)
1737 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1738 CASE_MATHFN (BUILT_IN_SIN)
1739 CASE_MATHFN (BUILT_IN_SINCOS)
1740 CASE_MATHFN (BUILT_IN_SINH)
1741 CASE_MATHFN (BUILT_IN_SQRT)
1742 CASE_MATHFN (BUILT_IN_TAN)
1743 CASE_MATHFN (BUILT_IN_TANH)
1744 CASE_MATHFN (BUILT_IN_TGAMMA)
1745 CASE_MATHFN (BUILT_IN_TRUNC)
1746 CASE_MATHFN (BUILT_IN_Y0)
1747 CASE_MATHFN (BUILT_IN_Y1)
1748 CASE_MATHFN (BUILT_IN_YN)
1754 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1755 return implicit_built_in_decls[fcode];
1756 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1757 return implicit_built_in_decls[fcodef];
1758 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1759 return implicit_built_in_decls[fcodel];
1764 /* If errno must be maintained, expand the RTL to check if the result,
1765 TARGET, of a built-in function call, EXP, is NaN, and if so set
1769 expand_errno_check (tree exp, rtx target)
1771 rtx lab = gen_label_rtx ();
1773 /* Test the result; if it is NaN, set errno=EDOM because
1774 the argument was not in the domain. */
1775 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1779 /* If this built-in doesn't throw an exception, set errno directly. */
1780 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1782 #ifdef GEN_ERRNO_RTX
1783 rtx errno_rtx = GEN_ERRNO_RTX;
1786 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1788 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1794 /* We can't set errno=EDOM directly; let the library call do it.
1795 Pop the arguments right away in case the call gets deleted. */
1797 expand_call (exp, target, 0);
1802 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1803 Return NULL_RTX if a normal call should be emitted rather than expanding
1804 the function in-line. EXP is the expression that is a call to the builtin
1805 function; if convenient, the result should be placed in TARGET.
1806 SUBTARGET may be used as the target for computing one of EXP's operands. */
1809 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1811 optab builtin_optab;
1812 rtx op0, insns, before_call;
1813 tree fndecl = get_callee_fndecl (exp);
1814 enum machine_mode mode;
1815 bool errno_set = false;
1818 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1821 arg = CALL_EXPR_ARG (exp, 0);
1823 switch (DECL_FUNCTION_CODE (fndecl))
1825 CASE_FLT_FN (BUILT_IN_SQRT):
1826 errno_set = ! tree_expr_nonnegative_p (arg);
1827 builtin_optab = sqrt_optab;
1829 CASE_FLT_FN (BUILT_IN_EXP):
1830 errno_set = true; builtin_optab = exp_optab; break;
1831 CASE_FLT_FN (BUILT_IN_EXP10):
1832 CASE_FLT_FN (BUILT_IN_POW10):
1833 errno_set = true; builtin_optab = exp10_optab; break;
1834 CASE_FLT_FN (BUILT_IN_EXP2):
1835 errno_set = true; builtin_optab = exp2_optab; break;
1836 CASE_FLT_FN (BUILT_IN_EXPM1):
1837 errno_set = true; builtin_optab = expm1_optab; break;
1838 CASE_FLT_FN (BUILT_IN_LOGB):
1839 errno_set = true; builtin_optab = logb_optab; break;
1840 CASE_FLT_FN (BUILT_IN_LOG):
1841 errno_set = true; builtin_optab = log_optab; break;
1842 CASE_FLT_FN (BUILT_IN_LOG10):
1843 errno_set = true; builtin_optab = log10_optab; break;
1844 CASE_FLT_FN (BUILT_IN_LOG2):
1845 errno_set = true; builtin_optab = log2_optab; break;
1846 CASE_FLT_FN (BUILT_IN_LOG1P):
1847 errno_set = true; builtin_optab = log1p_optab; break;
1848 CASE_FLT_FN (BUILT_IN_ASIN):
1849 builtin_optab = asin_optab; break;
1850 CASE_FLT_FN (BUILT_IN_ACOS):
1851 builtin_optab = acos_optab; break;
1852 CASE_FLT_FN (BUILT_IN_TAN):
1853 builtin_optab = tan_optab; break;
1854 CASE_FLT_FN (BUILT_IN_ATAN):
1855 builtin_optab = atan_optab; break;
1856 CASE_FLT_FN (BUILT_IN_FLOOR):
1857 builtin_optab = floor_optab; break;
1858 CASE_FLT_FN (BUILT_IN_CEIL):
1859 builtin_optab = ceil_optab; break;
1860 CASE_FLT_FN (BUILT_IN_TRUNC):
1861 builtin_optab = btrunc_optab; break;
1862 CASE_FLT_FN (BUILT_IN_ROUND):
1863 builtin_optab = round_optab; break;
1864 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1865 builtin_optab = nearbyint_optab;
1866 if (flag_trapping_math)
1868 /* Else fallthrough and expand as rint. */
1869 CASE_FLT_FN (BUILT_IN_RINT):
1870 builtin_optab = rint_optab; break;
1875 /* Make a suitable register to place result in. */
1876 mode = TYPE_MODE (TREE_TYPE (exp));
1878 if (! flag_errno_math || ! HONOR_NANS (mode))
1881 /* Before working hard, check whether the instruction is available. */
1882 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1884 target = gen_reg_rtx (mode);
1886 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1887 need to expand the argument again. This way, we will not perform
1888 side-effects more the once. */
1889 narg = builtin_save_expr (arg);
1893 exp = build_call_expr (fndecl, 1, arg);
1896 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1900 /* Compute into TARGET.
1901 Set TARGET to wherever the result comes back. */
1902 target = expand_unop (mode, builtin_optab, op0, target, 0);
1907 expand_errno_check (exp, target);
1909 /* Output the entire sequence. */
1910 insns = get_insns ();
1916 /* If we were unable to expand via the builtin, stop the sequence
1917 (without outputting the insns) and call to the library function
1918 with the stabilized argument list. */
1922 before_call = get_last_insn ();
1924 target = expand_call (exp, target, target == const0_rtx);
1926 /* If this is a sqrt operation and we don't care about errno, try to
1927 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1928 This allows the semantics of the libcall to be visible to the RTL
1930 if (builtin_optab == sqrt_optab && !errno_set)
1932 /* Search backwards through the insns emitted by expand_call looking
1933 for the instruction with the REG_RETVAL note. */
1934 rtx last = get_last_insn ();
1935 while (last != before_call)
1937 if (find_reg_note (last, REG_RETVAL, NULL))
1939 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1940 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1941 two elements, i.e. symbol_ref(sqrt) and the operand. */
1943 && GET_CODE (note) == EXPR_LIST
1944 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1945 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1946 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1948 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1949 /* Check operand is a register with expected mode. */
1952 && GET_MODE (operand) == mode)
1954 /* Replace the REG_EQUAL note with a SQRT rtx. */
1955 rtx equiv = gen_rtx_SQRT (mode, operand);
1956 set_unique_reg_note (last, REG_EQUAL, equiv);
1961 last = PREV_INSN (last);
1968 /* Expand a call to the builtin binary math functions (pow and atan2).
1969 Return NULL_RTX if a normal call should be emitted rather than expanding the
1970 function in-line. EXP is the expression that is a call to the builtin
1971 function; if convenient, the result should be placed in TARGET.
1972 SUBTARGET may be used as the target for computing one of EXP's
1976 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1978 optab builtin_optab;
1979 rtx op0, op1, insns;
1980 int op1_type = REAL_TYPE;
1981 tree fndecl = get_callee_fndecl (exp);
1982 tree arg0, arg1, narg;
1983 enum machine_mode mode;
1984 bool errno_set = true;
1987 switch (DECL_FUNCTION_CODE (fndecl))
1989 CASE_FLT_FN (BUILT_IN_SCALBN):
1990 CASE_FLT_FN (BUILT_IN_SCALBLN):
1991 CASE_FLT_FN (BUILT_IN_LDEXP):
1992 op1_type = INTEGER_TYPE;
1997 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2000 arg0 = CALL_EXPR_ARG (exp, 0);
2001 arg1 = CALL_EXPR_ARG (exp, 1);
2003 switch (DECL_FUNCTION_CODE (fndecl))
2005 CASE_FLT_FN (BUILT_IN_POW):
2006 builtin_optab = pow_optab; break;
2007 CASE_FLT_FN (BUILT_IN_ATAN2):
2008 builtin_optab = atan2_optab; break;
2009 CASE_FLT_FN (BUILT_IN_SCALB):
2010 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2012 builtin_optab = scalb_optab; break;
2013 CASE_FLT_FN (BUILT_IN_SCALBN):
2014 CASE_FLT_FN (BUILT_IN_SCALBLN):
2015 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2017 /* Fall through... */
2018 CASE_FLT_FN (BUILT_IN_LDEXP):
2019 builtin_optab = ldexp_optab; break;
2020 CASE_FLT_FN (BUILT_IN_FMOD):
2021 builtin_optab = fmod_optab; break;
2022 CASE_FLT_FN (BUILT_IN_REMAINDER):
2023 CASE_FLT_FN (BUILT_IN_DREM):
2024 builtin_optab = remainder_optab; break;
2029 /* Make a suitable register to place result in. */
2030 mode = TYPE_MODE (TREE_TYPE (exp));
2032 /* Before working hard, check whether the instruction is available. */
2033 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2036 target = gen_reg_rtx (mode);
2038 if (! flag_errno_math || ! HONOR_NANS (mode))
2041 /* Always stabilize the argument list. */
2042 narg = builtin_save_expr (arg1);
2048 narg = builtin_save_expr (arg0);
2056 exp = build_call_expr (fndecl, 2, arg0, arg1);
2058 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2059 op1 = expand_normal (arg1);
2063 /* Compute into TARGET.
2064 Set TARGET to wherever the result comes back. */
2065 target = expand_binop (mode, builtin_optab, op0, op1,
2066 target, 0, OPTAB_DIRECT);
2068 /* If we were unable to expand via the builtin, stop the sequence
2069 (without outputting the insns) and call to the library function
2070 with the stabilized argument list. */
2074 return expand_call (exp, target, target == const0_rtx);
2078 expand_errno_check (exp, target);
2080 /* Output the entire sequence. */
2081 insns = get_insns ();
2088 /* Expand a call to the builtin sin and cos math functions.
2089 Return NULL_RTX if a normal call should be emitted rather than expanding the
2090 function in-line. EXP is the expression that is a call to the builtin
2091 function; if convenient, the result should be placed in TARGET.
2092 SUBTARGET may be used as the target for computing one of EXP's
2096 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2098 optab builtin_optab;
2100 tree fndecl = get_callee_fndecl (exp);
2101 enum machine_mode mode;
2104 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2107 arg = CALL_EXPR_ARG (exp, 0);
2109 switch (DECL_FUNCTION_CODE (fndecl))
2111 CASE_FLT_FN (BUILT_IN_SIN):
2112 CASE_FLT_FN (BUILT_IN_COS):
2113 builtin_optab = sincos_optab; break;
2118 /* Make a suitable register to place result in. */
2119 mode = TYPE_MODE (TREE_TYPE (exp));
2121 /* Check if sincos insn is available, otherwise fallback
2122 to sin or cos insn. */
2123 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2124 switch (DECL_FUNCTION_CODE (fndecl))
2126 CASE_FLT_FN (BUILT_IN_SIN):
2127 builtin_optab = sin_optab; break;
2128 CASE_FLT_FN (BUILT_IN_COS):
2129 builtin_optab = cos_optab; break;
2134 /* Before working hard, check whether the instruction is available. */
2135 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2137 target = gen_reg_rtx (mode);
2139 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2140 need to expand the argument again. This way, we will not perform
2141 side-effects more the once. */
2142 narg = save_expr (arg);
2146 exp = build_call_expr (fndecl, 1, arg);
2149 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2153 /* Compute into TARGET.
2154 Set TARGET to wherever the result comes back. */
2155 if (builtin_optab == sincos_optab)
2159 switch (DECL_FUNCTION_CODE (fndecl))
2161 CASE_FLT_FN (BUILT_IN_SIN):
2162 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2164 CASE_FLT_FN (BUILT_IN_COS):
2165 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2170 gcc_assert (result);
2174 target = expand_unop (mode, builtin_optab, op0, target, 0);
2179 /* Output the entire sequence. */
2180 insns = get_insns ();
2186 /* If we were unable to expand via the builtin, stop the sequence
2187 (without outputting the insns) and call to the library function
2188 with the stabilized argument list. */
2192 target = expand_call (exp, target, target == const0_rtx);
2197 /* Expand a call to one of the builtin math functions that operate on
2198 floating point argument and output an integer result (ilogb, isinf,
2200 Return 0 if a normal call should be emitted rather than expanding the
2201 function in-line. EXP is the expression that is a call to the builtin
2202 function; if convenient, the result should be placed in TARGET.
2203 SUBTARGET may be used as the target for computing one of EXP's operands. */
2206 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2208 optab builtin_optab = 0;
2209 enum insn_code icode = CODE_FOR_nothing;
2211 tree fndecl = get_callee_fndecl (exp);
2212 enum machine_mode mode;
2213 bool errno_set = false;
2216 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2219 arg = CALL_EXPR_ARG (exp, 0);
2221 switch (DECL_FUNCTION_CODE (fndecl))
2223 CASE_FLT_FN (BUILT_IN_ILOGB):
2224 errno_set = true; builtin_optab = ilogb_optab; break;
2225 CASE_FLT_FN (BUILT_IN_ISINF):
2226 builtin_optab = isinf_optab; break;
2227 case BUILT_IN_ISNORMAL:
2228 case BUILT_IN_ISFINITE:
2229 CASE_FLT_FN (BUILT_IN_FINITE):
2230 /* These builtins have no optabs (yet). */
2236 /* There's no easy way to detect the case we need to set EDOM. */
2237 if (flag_errno_math && errno_set)
2240 /* Optab mode depends on the mode of the input argument. */
2241 mode = TYPE_MODE (TREE_TYPE (arg));
2244 icode = builtin_optab->handlers[(int) mode].insn_code;
2246 /* Before working hard, check whether the instruction is available. */
2247 if (icode != CODE_FOR_nothing)
2249 /* Make a suitable register to place result in. */
2251 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2252 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2254 gcc_assert (insn_data[icode].operand[0].predicate
2255 (target, GET_MODE (target)));
2257 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2258 need to expand the argument again. This way, we will not perform
2259 side-effects more the once. */
2260 narg = builtin_save_expr (arg);
2264 exp = build_call_expr (fndecl, 1, arg);
2267 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2269 if (mode != GET_MODE (op0))
2270 op0 = convert_to_mode (mode, op0, 0);
2272 /* Compute into TARGET.
2273 Set TARGET to wherever the result comes back. */
2274 emit_unop_insn (icode, target, op0, UNKNOWN);
2278 /* If there is no optab, try generic code. */
2279 switch (DECL_FUNCTION_CODE (fndecl))
2283 CASE_FLT_FN (BUILT_IN_ISINF):
2285 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2286 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2287 tree const type = TREE_TYPE (arg);
2291 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2292 real_from_string (&r, buf);
2293 result = build_call_expr (isgr_fn, 2,
2294 fold_build1 (ABS_EXPR, type, arg),
2295 build_real (type, r));
2296 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2298 CASE_FLT_FN (BUILT_IN_FINITE):
2299 case BUILT_IN_ISFINITE:
2301 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2302 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2303 tree const type = TREE_TYPE (arg);
2307 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2308 real_from_string (&r, buf);
2309 result = build_call_expr (isle_fn, 2,
2310 fold_build1 (ABS_EXPR, type, arg),
2311 build_real (type, r));
2312 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2314 case BUILT_IN_ISNORMAL:
2316 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2317 islessequal(fabs(x),DBL_MAX). */
2318 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2319 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2320 tree const type = TREE_TYPE (arg);
2321 REAL_VALUE_TYPE rmax, rmin;
2324 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2325 real_from_string (&rmax, buf);
2326 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2327 real_from_string (&rmin, buf);
2328 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2329 result = build_call_expr (isle_fn, 2, arg,
2330 build_real (type, rmax));
2331 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2332 build_call_expr (isge_fn, 2, arg,
2333 build_real (type, rmin)));
2334 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2340 target = expand_call (exp, target, target == const0_rtx);
2345 /* Expand a call to the builtin sincos math function.
2346 Return NULL_RTX if a normal call should be emitted rather than expanding the
2347 function in-line. EXP is the expression that is a call to the builtin
2351 expand_builtin_sincos (tree exp)
2353 rtx op0, op1, op2, target1, target2;
2354 enum machine_mode mode;
2355 tree arg, sinp, cosp;
2358 if (!validate_arglist (exp, REAL_TYPE,
2359 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2362 arg = CALL_EXPR_ARG (exp, 0);
2363 sinp = CALL_EXPR_ARG (exp, 1);
2364 cosp = CALL_EXPR_ARG (exp, 2);
2366 /* Make a suitable register to place result in. */
2367 mode = TYPE_MODE (TREE_TYPE (arg));
2369 /* Check if sincos insn is available, otherwise emit the call. */
2370 if (sincos_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2373 target1 = gen_reg_rtx (mode);
2374 target2 = gen_reg_rtx (mode);
2376 op0 = expand_normal (arg);
2377 op1 = expand_normal (build_fold_indirect_ref (sinp));
2378 op2 = expand_normal (build_fold_indirect_ref (cosp));
2380 /* Compute into target1 and target2.
2381 Set TARGET to wherever the result comes back. */
2382 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2383 gcc_assert (result);
2385 /* Move target1 and target2 to the memory locations indicated
2387 emit_move_insn (op1, target1);
2388 emit_move_insn (op2, target2);
2393 /* Expand a call to the internal cexpi builtin to the sincos math function.
2394 EXP is the expression that is a call to the builtin function; if convenient,
2395 the result should be placed in TARGET. SUBTARGET may be used as the target
2396 for computing one of EXP's operands. */
2399 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2401 tree fndecl = get_callee_fndecl (exp);
2403 enum machine_mode mode;
2406 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2409 arg = CALL_EXPR_ARG (exp, 0);
2410 type = TREE_TYPE (arg);
2411 mode = TYPE_MODE (TREE_TYPE (arg));
2413 /* Try expanding via a sincos optab, fall back to emitting a libcall
2414 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2415 is only generated from sincos, cexp or if we have either of them. */
2416 if (sincos_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2418 op1 = gen_reg_rtx (mode);
2419 op2 = gen_reg_rtx (mode);
2421 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2423 /* Compute into op1 and op2. */
2424 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2426 else if (TARGET_HAS_SINCOS)
2428 tree call, fn = NULL_TREE;
2432 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2433 fn = built_in_decls[BUILT_IN_SINCOSF];
2434 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2435 fn = built_in_decls[BUILT_IN_SINCOS];
2436 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2437 fn = built_in_decls[BUILT_IN_SINCOSL];
2441 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2442 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2443 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2444 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2445 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2446 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2448 /* Make sure not to fold the sincos call again. */
2449 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2450 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2451 call, 3, arg, top1, top2));
2455 tree call, fn = NULL_TREE, narg;
2456 tree ctype = build_complex_type (type);
2458 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2459 fn = built_in_decls[BUILT_IN_CEXPF];
2460 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2461 fn = built_in_decls[BUILT_IN_CEXP];
2462 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2463 fn = built_in_decls[BUILT_IN_CEXPL];
2467 /* If we don't have a decl for cexp create one. This is the
2468 friendliest fallback if the user calls __builtin_cexpi
2469 without full target C99 function support. */
2470 if (fn == NULL_TREE)
2473 const char *name = NULL;
2475 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2477 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2479 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2482 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2483 fn = build_fn_decl (name, fntype);
2486 narg = fold_build2 (COMPLEX_EXPR, ctype,
2487 build_real (type, dconst0), arg);
2489 /* Make sure not to fold the cexp call again. */
2490 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2491 return expand_expr (build_call_nary (ctype, call, 1, narg),
2492 target, VOIDmode, EXPAND_NORMAL);
2495 /* Now build the proper return type. */
2496 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2497 make_tree (TREE_TYPE (arg), op2),
2498 make_tree (TREE_TYPE (arg), op1)),
2499 target, VOIDmode, EXPAND_NORMAL);
2502 /* Expand a call to one of the builtin rounding functions gcc defines
2503 as an extension (lfloor and lceil). As these are gcc extensions we
2504 do not need to worry about setting errno to EDOM.
2505 If expanding via optab fails, lower expression to (int)(floor(x)).
2506 EXP is the expression that is a call to the builtin function;
2507 if convenient, the result should be placed in TARGET. SUBTARGET may
2508 be used as the target for computing one of EXP's operands. */
2511 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2513 convert_optab builtin_optab;
2514 rtx op0, insns, tmp;
2515 tree fndecl = get_callee_fndecl (exp);
2516 enum built_in_function fallback_fn;
2517 tree fallback_fndecl;
2518 enum machine_mode mode;
2521 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2524 arg = CALL_EXPR_ARG (exp, 0);
2526 switch (DECL_FUNCTION_CODE (fndecl))
2528 CASE_FLT_FN (BUILT_IN_LCEIL):
2529 CASE_FLT_FN (BUILT_IN_LLCEIL):
2530 builtin_optab = lceil_optab;
2531 fallback_fn = BUILT_IN_CEIL;
2534 CASE_FLT_FN (BUILT_IN_LFLOOR):
2535 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2536 builtin_optab = lfloor_optab;
2537 fallback_fn = BUILT_IN_FLOOR;
2544 /* Make a suitable register to place result in. */
2545 mode = TYPE_MODE (TREE_TYPE (exp));
2547 target = gen_reg_rtx (mode);
2549 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2550 need to expand the argument again. This way, we will not perform
2551 side-effects more the once. */
2552 narg = builtin_save_expr (arg);
2556 exp = build_call_expr (fndecl, 1, arg);
2559 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2563 /* Compute into TARGET. */
2564 if (expand_sfix_optab (target, op0, builtin_optab))
2566 /* Output the entire sequence. */
2567 insns = get_insns ();
2573 /* If we were unable to expand via the builtin, stop the sequence
2574 (without outputting the insns). */
2577 /* Fall back to floating point rounding optab. */
2578 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2580 /* For non-C99 targets we may end up without a fallback fndecl here
2581 if the user called __builtin_lfloor directly. In this case emit
2582 a call to the floor/ceil variants nevertheless. This should result
2583 in the best user experience for not full C99 targets. */
2584 if (fallback_fndecl == NULL_TREE)
2587 const char *name = NULL;
2589 switch (DECL_FUNCTION_CODE (fndecl))
2591 case BUILT_IN_LCEIL:
2592 case BUILT_IN_LLCEIL:
2595 case BUILT_IN_LCEILF:
2596 case BUILT_IN_LLCEILF:
2599 case BUILT_IN_LCEILL:
2600 case BUILT_IN_LLCEILL:
2603 case BUILT_IN_LFLOOR:
2604 case BUILT_IN_LLFLOOR:
2607 case BUILT_IN_LFLOORF:
2608 case BUILT_IN_LLFLOORF:
2611 case BUILT_IN_LFLOORL:
2612 case BUILT_IN_LLFLOORL:
2619 fntype = build_function_type_list (TREE_TYPE (arg),
2620 TREE_TYPE (arg), NULL_TREE);
2621 fallback_fndecl = build_fn_decl (name, fntype);
2624 exp = build_call_expr (fallback_fndecl, 1, arg);
2626 tmp = expand_normal (exp);
2628 /* Truncate the result of floating point optab to integer
2629 via expand_fix (). */
2630 target = gen_reg_rtx (mode);
2631 expand_fix (target, tmp, 0);
2636 /* Expand a call to one of the builtin math functions doing integer
2638 Return 0 if a normal call should be emitted rather than expanding the
2639 function in-line. EXP is the expression that is a call to the builtin
2640 function; if convenient, the result should be placed in TARGET.
2641 SUBTARGET may be used as the target for computing one of EXP's operands. */
2644 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2646 convert_optab builtin_optab;
2648 tree fndecl = get_callee_fndecl (exp);
2650 enum machine_mode mode;
2652 /* There's no easy way to detect the case we need to set EDOM. */
2653 if (flag_errno_math)
2656 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2659 arg = CALL_EXPR_ARG (exp, 0);
2661 switch (DECL_FUNCTION_CODE (fndecl))
2663 CASE_FLT_FN (BUILT_IN_LRINT):
2664 CASE_FLT_FN (BUILT_IN_LLRINT):
2665 builtin_optab = lrint_optab; break;
2666 CASE_FLT_FN (BUILT_IN_LROUND):
2667 CASE_FLT_FN (BUILT_IN_LLROUND):
2668 builtin_optab = lround_optab; break;
2673 /* Make a suitable register to place result in. */
2674 mode = TYPE_MODE (TREE_TYPE (exp));
2676 target = gen_reg_rtx (mode);
2678 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2679 need to expand the argument again. This way, we will not perform
2680 side-effects more the once. */
2681 narg = builtin_save_expr (arg);
2685 exp = build_call_expr (fndecl, 1, arg);
2688 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2692 if (expand_sfix_optab (target, op0, builtin_optab))
2694 /* Output the entire sequence. */
2695 insns = get_insns ();
2701 /* If we were unable to expand via the builtin, stop the sequence
2702 (without outputting the insns) and call to the library function
2703 with the stabilized argument list. */
2706 target = expand_call (exp, target, target == const0_rtx);
2711 /* To evaluate powi(x,n), the floating point value x raised to the
2712 constant integer exponent n, we use a hybrid algorithm that
2713 combines the "window method" with look-up tables. For an
2714 introduction to exponentiation algorithms and "addition chains",
2715 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2716 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2717 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2718 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2720 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2721 multiplications to inline before calling the system library's pow
2722 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2723 so this default never requires calling pow, powf or powl. */
2725 #ifndef POWI_MAX_MULTS
2726 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2729 /* The size of the "optimal power tree" lookup table. All
2730 exponents less than this value are simply looked up in the
2731 powi_table below. This threshold is also used to size the
2732 cache of pseudo registers that hold intermediate results. */
2733 #define POWI_TABLE_SIZE 256
2735 /* The size, in bits of the window, used in the "window method"
2736 exponentiation algorithm. This is equivalent to a radix of
2737 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2738 #define POWI_WINDOW_SIZE 3
2740 /* The following table is an efficient representation of an
2741 "optimal power tree". For each value, i, the corresponding
2742 value, j, in the table states than an optimal evaluation
2743 sequence for calculating pow(x,i) can be found by evaluating
2744 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2745 100 integers is given in Knuth's "Seminumerical algorithms". */
2747 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2749 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2750 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2751 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2752 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2753 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2754 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2755 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2756 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2757 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2758 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2759 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2760 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2761 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2762 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2763 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2764 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2765 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2766 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2767 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2768 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2769 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2770 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2771 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2772 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2773 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2774 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2775 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2776 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2777 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2778 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2779 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2780 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2784 /* Return the number of multiplications required to calculate
2785 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2786 subroutine of powi_cost. CACHE is an array indicating
2787 which exponents have already been calculated. */
2790 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2792 /* If we've already calculated this exponent, then this evaluation
2793 doesn't require any additional multiplications. */
2798 return powi_lookup_cost (n - powi_table[n], cache)
2799 + powi_lookup_cost (powi_table[n], cache) + 1;
2802 /* Return the number of multiplications required to calculate
2803 powi(x,n) for an arbitrary x, given the exponent N. This
2804 function needs to be kept in sync with expand_powi below. */
2807 powi_cost (HOST_WIDE_INT n)
2809 bool cache[POWI_TABLE_SIZE];
2810 unsigned HOST_WIDE_INT digit;
2811 unsigned HOST_WIDE_INT val;
2817 /* Ignore the reciprocal when calculating the cost. */
2818 val = (n < 0) ? -n : n;
2820 /* Initialize the exponent cache. */
2821 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2826 while (val >= POWI_TABLE_SIZE)
2830 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2831 result += powi_lookup_cost (digit, cache)
2832 + POWI_WINDOW_SIZE + 1;
2833 val >>= POWI_WINDOW_SIZE;
2842 return result + powi_lookup_cost (val, cache);
2845 /* Recursive subroutine of expand_powi. This function takes the array,
2846 CACHE, of already calculated exponents and an exponent N and returns
2847 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2850 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2852 unsigned HOST_WIDE_INT digit;
2856 if (n < POWI_TABLE_SIZE)
2861 target = gen_reg_rtx (mode);
2864 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2865 op1 = expand_powi_1 (mode, powi_table[n], cache);
2869 target = gen_reg_rtx (mode);
2870 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2871 op0 = expand_powi_1 (mode, n - digit, cache);
2872 op1 = expand_powi_1 (mode, digit, cache);
2876 target = gen_reg_rtx (mode);
2877 op0 = expand_powi_1 (mode, n >> 1, cache);
2881 result = expand_mult (mode, op0, op1, target, 0);
2882 if (result != target)
2883 emit_move_insn (target, result);
2887 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2888 floating point operand in mode MODE, and N is the exponent. This
2889 function needs to be kept in sync with powi_cost above. */
2892 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2894 unsigned HOST_WIDE_INT val;
2895 rtx cache[POWI_TABLE_SIZE];
2899 return CONST1_RTX (mode);
2901 val = (n < 0) ? -n : n;
2903 memset (cache, 0, sizeof (cache));
2906 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2908 /* If the original exponent was negative, reciprocate the result. */
2910 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2911 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2916 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2917 a normal call should be emitted rather than expanding the function
2918 in-line. EXP is the expression that is a call to the builtin
2919 function; if convenient, the result should be placed in TARGET. */
2922 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2926 tree type = TREE_TYPE (exp);
2927 REAL_VALUE_TYPE cint, c, c2;
2930 enum machine_mode mode = TYPE_MODE (type);
2932 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2935 arg0 = CALL_EXPR_ARG (exp, 0);
2936 arg1 = CALL_EXPR_ARG (exp, 1);
2938 if (TREE_CODE (arg1) != REAL_CST
2939 || TREE_OVERFLOW (arg1))
2940 return expand_builtin_mathfn_2 (exp, target, subtarget);
2942 /* Handle constant exponents. */
2944 /* For integer valued exponents we can expand to an optimal multiplication
2945 sequence using expand_powi. */
2946 c = TREE_REAL_CST (arg1);
2947 n = real_to_integer (&c);
2948 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2949 if (real_identical (&c, &cint)
2950 && ((n >= -1 && n <= 2)
2951 || (flag_unsafe_math_optimizations
2953 && powi_cost (n) <= POWI_MAX_MULTS)))
2955 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2958 op = force_reg (mode, op);
2959 op = expand_powi (op, mode, n);
2964 narg0 = builtin_save_expr (arg0);
2966 /* If the exponent is not integer valued, check if it is half of an integer.
2967 In this case we can expand to sqrt (x) * x**(n/2). */
2968 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2969 if (fn != NULL_TREE)
2971 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2972 n = real_to_integer (&c2);
2973 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2974 if (real_identical (&c2, &cint)
2975 && ((flag_unsafe_math_optimizations
2977 && powi_cost (n/2) <= POWI_MAX_MULTS)
2980 tree call_expr = build_call_expr (fn, 1, narg0);
2981 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2984 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2985 op2 = force_reg (mode, op2);
2986 op2 = expand_powi (op2, mode, abs (n / 2));
2987 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2988 0, OPTAB_LIB_WIDEN);
2989 /* If the original exponent was negative, reciprocate the
2992 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2993 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2999 /* Try if the exponent is a third of an integer. In this case
3000 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3001 different from pow (x, 1./3.) due to rounding and behavior
3002 with negative x we need to constrain this transformation to
3003 unsafe math and positive x or finite math. */
3004 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3006 && flag_unsafe_math_optimizations
3007 && (tree_expr_nonnegative_p (arg0)
3008 || !HONOR_NANS (mode)))
3010 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3011 real_round (&c2, mode, &c2);
3012 n = real_to_integer (&c2);
3013 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3014 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3015 real_convert (&c2, mode, &c2);
3016 if (real_identical (&c2, &c)
3018 && powi_cost (n/3) <= POWI_MAX_MULTS)
3021 tree call_expr = build_call_expr (fn, 1,narg0);
3022 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3023 if (abs (n) % 3 == 2)
3024 op = expand_simple_binop (mode, MULT, op, op, op,
3025 0, OPTAB_LIB_WIDEN);
3028 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3029 op2 = force_reg (mode, op2);
3030 op2 = expand_powi (op2, mode, abs (n / 3));
3031 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3032 0, OPTAB_LIB_WIDEN);
3033 /* If the original exponent was negative, reciprocate the
3036 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3037 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3043 /* Fall back to optab expansion. */
3044 return expand_builtin_mathfn_2 (exp, target, subtarget);
3047 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3048 a normal call should be emitted rather than expanding the function
3049 in-line. EXP is the expression that is a call to the builtin
3050 function; if convenient, the result should be placed in TARGET. */
3053 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3057 enum machine_mode mode;
3058 enum machine_mode mode2;
3060 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3063 arg0 = CALL_EXPR_ARG (exp, 0);
3064 arg1 = CALL_EXPR_ARG (exp, 1);
3065 mode = TYPE_MODE (TREE_TYPE (exp));
3067 /* Handle constant power. */
3069 if (TREE_CODE (arg1) == INTEGER_CST
3070 && !TREE_OVERFLOW (arg1))
3072 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3074 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3075 Otherwise, check the number of multiplications required. */
3076 if ((TREE_INT_CST_HIGH (arg1) == 0
3077 || TREE_INT_CST_HIGH (arg1) == -1)
3078 && ((n >= -1 && n <= 2)
3080 && powi_cost (n) <= POWI_MAX_MULTS)))
3082 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3083 op0 = force_reg (mode, op0);
3084 return expand_powi (op0, mode, n);
3088 /* Emit a libcall to libgcc. */
3090 /* Mode of the 2nd argument must match that of an int. */
3091 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3093 if (target == NULL_RTX)
3094 target = gen_reg_rtx (mode);
3096 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3097 if (GET_MODE (op0) != mode)
3098 op0 = convert_to_mode (mode, op0, 0);
3099 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3100 if (GET_MODE (op1) != mode2)
3101 op1 = convert_to_mode (mode2, op1, 0);
3103 target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
3104 target, LCT_CONST_MAKE_BLOCK, mode, 2,
3105 op0, mode, op1, mode2);
3110 /* Expand expression EXP which is a call to the strlen builtin. Return
3111 NULL_RTX if we failed the caller should emit a normal call, otherwise
3112 try to get the result in TARGET, if convenient. */
3115 expand_builtin_strlen (tree exp, rtx target,
3116 enum machine_mode target_mode)
3118 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3124 tree src = CALL_EXPR_ARG (exp, 0);
3125 rtx result, src_reg, char_rtx, before_strlen;
3126 enum machine_mode insn_mode = target_mode, char_mode;
3127 enum insn_code icode = CODE_FOR_nothing;
3130 /* If the length can be computed at compile-time, return it. */
3131 len = c_strlen (src, 0);
3133 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3135 /* If the length can be computed at compile-time and is constant
3136 integer, but there are side-effects in src, evaluate
3137 src for side-effects, then return len.
3138 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3139 can be optimized into: i++; x = 3; */
3140 len = c_strlen (src, 1);
3141 if (len && TREE_CODE (len) == INTEGER_CST)
3143 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3144 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3147 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3149 /* If SRC is not a pointer type, don't do this operation inline. */
3153 /* Bail out if we can't compute strlen in the right mode. */
3154 while (insn_mode != VOIDmode)
3156 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
3157 if (icode != CODE_FOR_nothing)
3160 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3162 if (insn_mode == VOIDmode)
3165 /* Make a place to write the result of the instruction. */
3169 && GET_MODE (result) == insn_mode
3170 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3171 result = gen_reg_rtx (insn_mode);
3173 /* Make a place to hold the source address. We will not expand
3174 the actual source until we are sure that the expansion will
3175 not fail -- there are trees that cannot be expanded twice. */
3176 src_reg = gen_reg_rtx (Pmode);
3178 /* Mark the beginning of the strlen sequence so we can emit the
3179 source operand later. */
3180 before_strlen = get_last_insn ();
3182 char_rtx = const0_rtx;
3183 char_mode = insn_data[(int) icode].operand[2].mode;
3184 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3186 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3188 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3189 char_rtx, GEN_INT (align));
3194 /* Now that we are assured of success, expand the source. */
3196 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3198 emit_move_insn (src_reg, pat);
3203 emit_insn_after (pat, before_strlen);
3205 emit_insn_before (pat, get_insns ());
3207 /* Return the value in the proper mode for this function. */
3208 if (GET_MODE (result) == target_mode)
3210 else if (target != 0)
3211 convert_move (target, result, 0);
3213 target = convert_to_mode (target_mode, result, 0);
3219 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3220 caller should emit a normal call, otherwise try to get the result
3221 in TARGET, if convenient (and in mode MODE if that's convenient). */
3224 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3226 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3228 tree type = TREE_TYPE (exp);
3229 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3230 CALL_EXPR_ARG (exp, 1), type);
3232 return expand_expr (result, target, mode, EXPAND_NORMAL);
3237 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3238 caller should emit a normal call, otherwise try to get the result
3239 in TARGET, if convenient (and in mode MODE if that's convenient). */
3242 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3244 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3246 tree type = TREE_TYPE (exp);
3247 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3248 CALL_EXPR_ARG (exp, 1), type);
3250 return expand_expr (result, target, mode, EXPAND_NORMAL);
3252 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3257 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3258 caller should emit a normal call, otherwise try to get the result
3259 in TARGET, if convenient (and in mode MODE if that's convenient). */
3262 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3264 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3266 tree type = TREE_TYPE (exp);
3267 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3268 CALL_EXPR_ARG (exp, 1), type);
3270 return expand_expr (result, target, mode, EXPAND_NORMAL);
3275 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3276 caller should emit a normal call, otherwise try to get the result
3277 in TARGET, if convenient (and in mode MODE if that's convenient). */
3280 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3282 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3284 tree type = TREE_TYPE (exp);
3285 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3286 CALL_EXPR_ARG (exp, 1), type);
3288 return expand_expr (result, target, mode, EXPAND_NORMAL);
3293 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3294 bytes from constant string DATA + OFFSET and return it as target
3298 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3299 enum machine_mode mode)
3301 const char *str = (const char *) data;
3303 gcc_assert (offset >= 0
3304 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3305 <= strlen (str) + 1));
3307 return c_readstr (str + offset, mode);
3310 /* Expand a call EXP to the memcpy builtin.
3311 Return NULL_RTX if we failed, the caller should emit a normal call,
3312 otherwise try to get the result in TARGET, if convenient (and in
3313 mode MODE if that's convenient). */
3316 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3318 tree fndecl = get_callee_fndecl (exp);
3320 if (!validate_arglist (exp,
3321 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3325 tree dest = CALL_EXPR_ARG (exp, 0);
3326 tree src = CALL_EXPR_ARG (exp, 1);
3327 tree len = CALL_EXPR_ARG (exp, 2);
3328 const char *src_str;
3329 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3330 unsigned int dest_align
3331 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3332 rtx dest_mem, src_mem, dest_addr, len_rtx;
3333 tree result = fold_builtin_memory_op (dest, src, len,
3334 TREE_TYPE (TREE_TYPE (fndecl)),
3336 HOST_WIDE_INT expected_size = -1;
3337 unsigned int expected_align = 0;
3341 while (TREE_CODE (result) == COMPOUND_EXPR)
3343 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3345 result = TREE_OPERAND (result, 1);
3347 return expand_expr (result, target, mode, EXPAND_NORMAL);
3350 /* If DEST is not a pointer type, call the normal function. */
3351 if (dest_align == 0)
3354 /* If either SRC is not a pointer type, don't do this
3355 operation in-line. */
3359 stringop_block_profile (exp, &expected_align, &expected_size);
3360 if (expected_align < dest_align)
3361 expected_align = dest_align;
3362 dest_mem = get_memory_rtx (dest, len);
3363 set_mem_align (dest_mem, dest_align);
3364 len_rtx = expand_normal (len);
3365 src_str = c_getstr (src);
3367 /* If SRC is a string constant and block move would be done
3368 by pieces, we can avoid loading the string from memory
3369 and only stored the computed constants. */
3371 && GET_CODE (len_rtx) == CONST_INT
3372 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3373 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3374 (void *) src_str, dest_align))
3376 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3377 builtin_memcpy_read_str,
3378 (void *) src_str, dest_align, 0);
3379 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3380 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3384 src_mem = get_memory_rtx (src, len);
3385 set_mem_align (src_mem, src_align);
3387 /* Copy word part most expediently. */
3388 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3389 CALL_EXPR_TAILCALL (exp)
3390 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3391 expected_align, expected_size);
3395 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3396 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3402 /* Expand a call EXP to the mempcpy builtin.
3403 Return NULL_RTX if we failed; the caller should emit a normal call,
3404 otherwise try to get the result in TARGET, if convenient (and in
3405 mode MODE if that's convenient). If ENDP is 0 return the
3406 destination pointer, if ENDP is 1 return the end pointer ala
3407 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3411 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3413 if (!validate_arglist (exp,
3414 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3418 tree dest = CALL_EXPR_ARG (exp, 0);
3419 tree src = CALL_EXPR_ARG (exp, 1);
3420 tree len = CALL_EXPR_ARG (exp, 2);
3421 return expand_builtin_mempcpy_args (dest, src, len,
3423 target, mode, /*endp=*/ 1);
3427 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3428 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3429 so that this can also be called without constructing an actual CALL_EXPR.
3430 TYPE is the return type of the call. The other arguments and return value
3431 are the same as for expand_builtin_mempcpy. */
3434 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3435 rtx target, enum machine_mode mode, int endp)
3437 /* If return value is ignored, transform mempcpy into memcpy. */
3438 if (target == const0_rtx)
3440 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3445 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3446 target, mode, EXPAND_NORMAL);
3450 const char *src_str;
3451 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3452 unsigned int dest_align
3453 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3454 rtx dest_mem, src_mem, len_rtx;
3455 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3459 while (TREE_CODE (result) == COMPOUND_EXPR)
3461 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3463 result = TREE_OPERAND (result, 1);
3465 return expand_expr (result, target, mode, EXPAND_NORMAL);
3468 /* If either SRC or DEST is not a pointer type, don't do this
3469 operation in-line. */
3470 if (dest_align == 0 || src_align == 0)
3473 /* If LEN is not constant, call the normal function. */
3474 if (! host_integerp (len, 1))
3477 len_rtx = expand_normal (len);
3478 src_str = c_getstr (src);
3480 /* If SRC is a string constant and block move would be done
3481 by pieces, we can avoid loading the string from memory
3482 and only stored the computed constants. */
3484 && GET_CODE (len_rtx) == CONST_INT
3485 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3486 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3487 (void *) src_str, dest_align))
3489 dest_mem = get_memory_rtx (dest, len);
3490 set_mem_align (dest_mem, dest_align);
3491 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3492 builtin_memcpy_read_str,
3493 (void *) src_str, dest_align, endp);
3494 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3495 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3499 if (GET_CODE (len_rtx) == CONST_INT
3500 && can_move_by_pieces (INTVAL (len_rtx),
3501 MIN (dest_align, src_align)))
3503 dest_mem = get_memory_rtx (dest, len);
3504 set_mem_align (dest_mem, dest_align);
3505 src_mem = get_memory_rtx (src, len);
3506 set_mem_align (src_mem, src_align);
3507 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3508 MIN (dest_align, src_align), endp);
3509 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3510 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3518 /* Expand expression EXP, which is a call to the memmove builtin. Return
3519 NULL_RTX if we failed; the caller should emit a normal call. */
3522 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3524 if (!validate_arglist (exp,
3525 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3529 tree dest = CALL_EXPR_ARG (exp, 0);
3530 tree src = CALL_EXPR_ARG (exp, 1);
3531 tree len = CALL_EXPR_ARG (exp, 2);
3532 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3533 target, mode, ignore);
3537 /* Helper function to do the actual work for expand_builtin_memmove. The
3538 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3539 so that this can also be called without constructing an actual CALL_EXPR.
3540 TYPE is the return type of the call. The other arguments and return value
3541 are the same as for expand_builtin_memmove. */
3544 expand_builtin_memmove_args (tree dest, tree src, tree len,
3545 tree type, rtx target, enum machine_mode mode,
3548 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3552 STRIP_TYPE_NOPS (result);
3553 while (TREE_CODE (result) == COMPOUND_EXPR)
3555 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3557 result = TREE_OPERAND (result, 1);
3559 return expand_expr (result, target, mode, EXPAND_NORMAL);
3562 /* Otherwise, call the normal function. */
3566 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3567 NULL_RTX if we failed the caller should emit a normal call. */
3570 expand_builtin_bcopy (tree exp, int ignore)
3572 tree type = TREE_TYPE (exp);
3573 tree src, dest, size;
3575 if (!validate_arglist (exp,
3576 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3579 src = CALL_EXPR_ARG (exp, 0);
3580 dest = CALL_EXPR_ARG (exp, 1);
3581 size = CALL_EXPR_ARG (exp, 2);
3583 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3584 This is done this way so that if it isn't expanded inline, we fall
3585 back to calling bcopy instead of memmove. */
3586 return expand_builtin_memmove_args (dest, src,
3587 fold_convert (sizetype, size),
3588 type, const0_rtx, VOIDmode,
3593 # define HAVE_movstr 0
3594 # define CODE_FOR_movstr CODE_FOR_nothing
3597 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3598 we failed, the caller should emit a normal call, otherwise try to
3599 get the result in TARGET, if convenient. If ENDP is 0 return the
3600 destination pointer, if ENDP is 1 return the end pointer ala
3601 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3605 expand_movstr (tree dest, tree src, rtx target, int endp)
3611 const struct insn_data * data;
3616 dest_mem = get_memory_rtx (dest, NULL);
3617 src_mem = get_memory_rtx (src, NULL);
3620 target = force_reg (Pmode, XEXP (dest_mem, 0));
3621 dest_mem = replace_equiv_address (dest_mem, target);
3622 end = gen_reg_rtx (Pmode);
3626 if (target == 0 || target == const0_rtx)
3628 end = gen_reg_rtx (Pmode);
3636 data = insn_data + CODE_FOR_movstr;
3638 if (data->operand[0].mode != VOIDmode)
3639 end = gen_lowpart (data->operand[0].mode, end);
3641 insn = data->genfun (end, dest_mem, src_mem);
3647 /* movstr is supposed to set end to the address of the NUL
3648 terminator. If the caller requested a mempcpy-like return value,
3650 if (endp == 1 && target != const0_rtx)
3652 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3653 emit_move_insn (target, force_operand (tem, NULL_RTX));
3659 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3660 NULL_RTX if we failed the caller should emit a normal call, otherwise
3661 try to get the result in TARGET, if convenient (and in mode MODE if that's
3665 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3667 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3669 tree dest = CALL_EXPR_ARG (exp, 0);
3670 tree src = CALL_EXPR_ARG (exp, 1);
3671 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3676 /* Helper function to do the actual work for expand_builtin_strcpy. The
3677 arguments to the builtin_strcpy call DEST and SRC are broken out
3678 so that this can also be called without constructing an actual CALL_EXPR.
3679 The other arguments and return value are the same as for
3680 expand_builtin_strcpy. */
3683 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3684 rtx target, enum machine_mode mode)
3686 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3688 return expand_expr (result, target, mode, EXPAND_NORMAL);
3689 return expand_movstr (dest, src, target, /*endp=*/0);
3693 /* Expand a call EXP to the stpcpy builtin.
3694 Return NULL_RTX if we failed the caller should emit a normal call,
3695 otherwise try to get the result in TARGET, if convenient (and in
3696 mode MODE if that's convenient). */
3699 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3703 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3706 dst = CALL_EXPR_ARG (exp, 0);
3707 src = CALL_EXPR_ARG (exp, 1);
3709 /* If return value is ignored, transform stpcpy into strcpy. */
3710 if (target == const0_rtx)
3712 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3716 return expand_expr (build_call_expr (fn, 2, dst, src),
3717 target, mode, EXPAND_NORMAL);
3724 /* Ensure we get an actual string whose length can be evaluated at
3725 compile-time, not an expression containing a string. This is
3726 because the latter will potentially produce pessimized code
3727 when used to produce the return value. */
3728 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3729 return expand_movstr (dst, src, target, /*endp=*/2);
3731 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3732 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3733 target, mode, /*endp=*/2);
3738 if (TREE_CODE (len) == INTEGER_CST)
3740 rtx len_rtx = expand_normal (len);
3742 if (GET_CODE (len_rtx) == CONST_INT)
3744 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3745 dst, src, target, mode);
3751 if (mode != VOIDmode)
3752 target = gen_reg_rtx (mode);
3754 target = gen_reg_rtx (GET_MODE (ret));
3756 if (GET_MODE (target) != GET_MODE (ret))
3757 ret = gen_lowpart (GET_MODE (target), ret);
3759 ret = plus_constant (ret, INTVAL (len_rtx));
3760 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3768 return expand_movstr (dst, src, target, /*endp=*/2);
3772 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3773 bytes from constant string DATA + OFFSET and return it as target
3777 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3778 enum machine_mode mode)
3780 const char *str = (const char *) data;
3782 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3785 return c_readstr (str + offset, mode);
3788 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3789 NULL_RTX if we failed the caller should emit a normal call. */
3792 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3794 tree fndecl = get_callee_fndecl (exp);
3796 if (validate_arglist (exp,
3797 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3799 tree dest = CALL_EXPR_ARG (exp, 0);
3800 tree src = CALL_EXPR_ARG (exp, 1);
3801 tree len = CALL_EXPR_ARG (exp, 2);
3802 tree slen = c_strlen (src, 1);
3803 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3807 while (TREE_CODE (result) == COMPOUND_EXPR)
3809 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3811 result = TREE_OPERAND (result, 1);
3813 return expand_expr (result, target, mode, EXPAND_NORMAL);
3816 /* We must be passed a constant len and src parameter. */
3817 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3820 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3822 /* We're required to pad with trailing zeros if the requested
3823 len is greater than strlen(s2)+1. In that case try to
3824 use store_by_pieces, if it fails, punt. */
3825 if (tree_int_cst_lt (slen, len))
3827 unsigned int dest_align
3828 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3829 const char *p = c_getstr (src);
3832 if (!p || dest_align == 0 || !host_integerp (len, 1)
3833 || !can_store_by_pieces (tree_low_cst (len, 1),
3834 builtin_strncpy_read_str,
3835 (void *) p, dest_align))
3838 dest_mem = get_memory_rtx (dest, len);
3839 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3840 builtin_strncpy_read_str,
3841 (void *) p, dest_align, 0);
3842 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3843 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3850 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3851 bytes from constant string DATA + OFFSET and return it as target
3855 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3856 enum machine_mode mode)
3858 const char *c = (const char *) data;
3859 char *p = alloca (GET_MODE_SIZE (mode));
3861 memset (p, *c, GET_MODE_SIZE (mode));
3863 return c_readstr (p, mode);
3866 /* Callback routine for store_by_pieces. Return the RTL of a register
3867 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3868 char value given in the RTL register data. For example, if mode is
3869 4 bytes wide, return the RTL for 0x01010101*data. */
3872 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3873 enum machine_mode mode)
3879 size = GET_MODE_SIZE (mode);
3884 memset (p, 1, size);
3885 coeff = c_readstr (p, mode);
3887 target = convert_to_mode (mode, (rtx) data, 1);
3888 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3889 return force_reg (mode, target);
3892 /* Expand expression EXP, which is a call to the memset builtin. Return
3893 NULL_RTX if we failed the caller should emit a normal call, otherwise
3894 try to get the result in TARGET, if convenient (and in mode MODE if that's
3898 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3900 if (!validate_arglist (exp,
3901 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3905 tree dest = CALL_EXPR_ARG (exp, 0);
3906 tree val = CALL_EXPR_ARG (exp, 1);
3907 tree len = CALL_EXPR_ARG (exp, 2);
3908 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3912 /* Helper function to do the actual work for expand_builtin_memset. The
3913 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3914 so that this can also be called without constructing an actual CALL_EXPR.
3915 The other arguments and return value are the same as for
3916 expand_builtin_memset. */
3919 expand_builtin_memset_args (tree dest, tree val, tree len,
3920 rtx target, enum machine_mode mode, tree orig_exp)
3923 enum built_in_function fcode;
3925 unsigned int dest_align;
3926 rtx dest_mem, dest_addr, len_rtx;
3927 HOST_WIDE_INT expected_size = -1;
3928 unsigned int expected_align = 0;
3930 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3932 /* If DEST is not a pointer type, don't do this operation in-line. */
3933 if (dest_align == 0)
3936 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3937 if (expected_align < dest_align)
3938 expected_align = dest_align;
3940 /* If the LEN parameter is zero, return DEST. */
3941 if (integer_zerop (len))
3943 /* Evaluate and ignore VAL in case it has side-effects. */
3944 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3945 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3948 /* Stabilize the arguments in case we fail. */
3949 dest = builtin_save_expr (dest);
3950 val = builtin_save_expr (val);
3951 len = builtin_save_expr (len);
3953 len_rtx = expand_normal (len);
3954 dest_mem = get_memory_rtx (dest, len);
3956 if (TREE_CODE (val) != INTEGER_CST)
3960 val_rtx = expand_normal (val);
3961 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3964 /* Assume that we can memset by pieces if we can store
3965 * the coefficients by pieces (in the required modes).
3966 * We can't pass builtin_memset_gen_str as that emits RTL. */
3968 if (host_integerp (len, 1)
3969 && !(optimize_size && tree_low_cst (len, 1) > 1)
3970 && can_store_by_pieces (tree_low_cst (len, 1),
3971 builtin_memset_read_str, &c, dest_align))
3973 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3975 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3976 builtin_memset_gen_str, val_rtx, dest_align, 0);
3978 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3979 dest_align, expected_align,
3983 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3984 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3988 if (target_char_cast (val, &c))
3993 if (host_integerp (len, 1)
3994 && !(optimize_size && tree_low_cst (len, 1) > 1)
3995 && can_store_by_pieces (tree_low_cst (len, 1),
3996 builtin_memset_read_str, &c, dest_align))
3997 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3998 builtin_memset_read_str, &c, dest_align, 0);
3999 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4000 dest_align, expected_align,
4004 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4005 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4009 set_mem_align (dest_mem, dest_align);
4010 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4011 CALL_EXPR_TAILCALL (orig_exp)
4012 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4013 expected_align, expected_size);
4017 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4018 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4024 fndecl = get_callee_fndecl (orig_exp);
4025 fcode = DECL_FUNCTION_CODE (fndecl);
4026 if (fcode == BUILT_IN_MEMSET)
4027 fn = build_call_expr (fndecl, 3, dest, val, len);
4028 else if (fcode == BUILT_IN_BZERO)
4029 fn = build_call_expr (fndecl, 2, dest, len);
4032 if (TREE_CODE (fn) == CALL_EXPR)
4033 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4034 return expand_call (fn, target, target == const0_rtx);
4037 /* Expand expression EXP, which is a call to the bzero builtin. Return
4038 NULL_RTX if we failed the caller should emit a normal call. */
4041 expand_builtin_bzero (tree exp)
4045 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4048 dest = CALL_EXPR_ARG (exp, 0);
4049 size = CALL_EXPR_ARG (exp, 1);
4051 /* New argument list transforming bzero(ptr x, int y) to
4052 memset(ptr x, int 0, size_t y). This is done this way
4053 so that if it isn't expanded inline, we fallback to
4054 calling bzero instead of memset. */
4056 return expand_builtin_memset_args (dest, integer_zero_node,
4057 fold_convert (sizetype, size),
4058 const0_rtx, VOIDmode, exp);
4061 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4062 caller should emit a normal call, otherwise try to get the result
4063 in TARGET, if convenient (and in mode MODE if that's convenient). */
4066 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4068 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4069 INTEGER_TYPE, VOID_TYPE))
4071 tree type = TREE_TYPE (exp);
4072 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4073 CALL_EXPR_ARG (exp, 1),
4074 CALL_EXPR_ARG (exp, 2), type);
4076 return expand_expr (result, target, mode, EXPAND_NORMAL);
4081 /* Expand expression EXP, which is a call to the memcmp built-in function.
4082 Return NULL_RTX if we failed and the
4083 caller should emit a normal call, otherwise try to get the result in
4084 TARGET, if convenient (and in mode MODE, if that's convenient). */
4087 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4089 if (!validate_arglist (exp,
4090 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4094 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4095 CALL_EXPR_ARG (exp, 1),
4096 CALL_EXPR_ARG (exp, 2));
4098 return expand_expr (result, target, mode, EXPAND_NORMAL);
4101 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4103 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4106 tree arg1 = CALL_EXPR_ARG (exp, 0);
4107 tree arg2 = CALL_EXPR_ARG (exp, 1);
4108 tree len = CALL_EXPR_ARG (exp, 2);
4111 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4113 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4114 enum machine_mode insn_mode;
4116 #ifdef HAVE_cmpmemsi
4118 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4121 #ifdef HAVE_cmpstrnsi
4123 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4128 /* If we don't have POINTER_TYPE, call the function. */
4129 if (arg1_align == 0 || arg2_align == 0)
4132 /* Make a place to write the result of the instruction. */
4135 && REG_P (result) && GET_MODE (result) == insn_mode
4136 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4137 result = gen_reg_rtx (insn_mode);
4139 arg1_rtx = get_memory_rtx (arg1, len);
4140 arg2_rtx = get_memory_rtx (arg2, len);
4141 arg3_rtx = expand_normal (len);
4143 /* Set MEM_SIZE as appropriate. */
4144 if (GET_CODE (arg3_rtx) == CONST_INT)
4146 set_mem_size (arg1_rtx, arg3_rtx);
4147 set_mem_size (arg2_rtx, arg3_rtx);
4150 #ifdef HAVE_cmpmemsi
4152 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4153 GEN_INT (MIN (arg1_align, arg2_align)));
4156 #ifdef HAVE_cmpstrnsi
4158 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4159 GEN_INT (MIN (arg1_align, arg2_align)));
4167 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
4168 TYPE_MODE (integer_type_node), 3,
4169 XEXP (arg1_rtx, 0), Pmode,
4170 XEXP (arg2_rtx, 0), Pmode,
4171 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4172 TYPE_UNSIGNED (sizetype)),
4173 TYPE_MODE (sizetype));
4175 /* Return the value in the proper mode for this function. */
4176 mode = TYPE_MODE (TREE_TYPE (exp));
4177 if (GET_MODE (result) == mode)
4179 else if (target != 0)
4181 convert_move (target, result, 0);
4185 return convert_to_mode (mode, result, 0);
4192 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4193 if we failed the caller should emit a normal call, otherwise try to get
4194 the result in TARGET, if convenient. */
4197 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4199 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4203 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4204 CALL_EXPR_ARG (exp, 1));
4206 return expand_expr (result, target, mode, EXPAND_NORMAL);
4209 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4210 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4211 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4213 rtx arg1_rtx, arg2_rtx;
4214 rtx result, insn = NULL_RTX;
4216 tree arg1 = CALL_EXPR_ARG (exp, 0);
4217 tree arg2 = CALL_EXPR_ARG (exp, 1);
4220 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4222 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4224 /* If we don't have POINTER_TYPE, call the function. */
4225 if (arg1_align == 0 || arg2_align == 0)
4228 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4229 arg1 = builtin_save_expr (arg1);
4230 arg2 = builtin_save_expr (arg2);
4232 arg1_rtx = get_memory_rtx (arg1, NULL);
4233 arg2_rtx = get_memory_rtx (arg2, NULL);
4235 #ifdef HAVE_cmpstrsi
4236 /* Try to call cmpstrsi. */
4239 enum machine_mode insn_mode
4240 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4242 /* Make a place to write the result of the instruction. */
4245 && REG_P (result) && GET_MODE (result) == insn_mode
4246 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4247 result = gen_reg_rtx (insn_mode);
4249 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4250 GEN_INT (MIN (arg1_align, arg2_align)));
4253 #ifdef HAVE_cmpstrnsi
4254 /* Try to determine at least one length and call cmpstrnsi. */
4255 if (!insn && HAVE_cmpstrnsi)
4260 enum machine_mode insn_mode
4261 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4262 tree len1 = c_strlen (arg1, 1);
4263 tree len2 = c_strlen (arg2, 1);
4266 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4268 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4270 /* If we don't have a constant length for the first, use the length
4271 of the second, if we know it. We don't require a constant for
4272 this case; some cost analysis could be done if both are available
4273 but neither is constant. For now, assume they're equally cheap,
4274 unless one has side effects. If both strings have constant lengths,
4281 else if (TREE_SIDE_EFFECTS (len1))
4283 else if (TREE_SIDE_EFFECTS (len2))
4285 else if (TREE_CODE (len1) != INTEGER_CST)
4287 else if (TREE_CODE (len2) != INTEGER_CST)
4289 else if (tree_int_cst_lt (len1, len2))
4294 /* If both arguments have side effects, we cannot optimize. */
4295 if (!len || TREE_SIDE_EFFECTS (len))
4298 arg3_rtx = expand_normal (len);
4300 /* Make a place to write the result of the instruction. */
4303 && REG_P (result) && GET_MODE (result) == insn_mode
4304 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4305 result = gen_reg_rtx (insn_mode);
4307 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4308 GEN_INT (MIN (arg1_align, arg2_align)));
4316 /* Return the value in the proper mode for this function. */
4317 mode = TYPE_MODE (TREE_TYPE (exp));
4318 if (GET_MODE (result) == mode)
4321 return convert_to_mode (mode, result, 0);
4322 convert_move (target, result, 0);
4326 /* Expand the library call ourselves using a stabilized argument
4327 list to avoid re-evaluating the function's arguments twice. */
4328 #ifdef HAVE_cmpstrnsi
4331 fndecl = get_callee_fndecl (exp);
4332 fn = build_call_expr (fndecl, 2, arg1, arg2);
4333 if (TREE_CODE (fn) == CALL_EXPR)
4334 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4335 return expand_call (fn, target, target == const0_rtx);
4341 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4342 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4343 the result in TARGET, if convenient. */
4346 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4348 if (!validate_arglist (exp,
4349 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4353 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4354 CALL_EXPR_ARG (exp, 1),
4355 CALL_EXPR_ARG (exp, 2));
4357 return expand_expr (result, target, mode, EXPAND_NORMAL);
4360 /* If c_strlen can determine an expression for one of the string
4361 lengths, and it doesn't have side effects, then emit cmpstrnsi
4362 using length MIN(strlen(string)+1, arg3). */
4363 #ifdef HAVE_cmpstrnsi
4366 tree len, len1, len2;
4367 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4370 tree arg1 = CALL_EXPR_ARG (exp, 0);
4371 tree arg2 = CALL_EXPR_ARG (exp, 1);
4372 tree arg3 = CALL_EXPR_ARG (exp, 2);
4375 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4377 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4378 enum machine_mode insn_mode
4379 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4381 len1 = c_strlen (arg1, 1);
4382 len2 = c_strlen (arg2, 1);
4385 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4387 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4389 /* If we don't have a constant length for the first, use the length
4390 of the second, if we know it. We don't require a constant for
4391 this case; some cost analysis could be done if both are available
4392 but neither is constant. For now, assume they're equally cheap,
4393 unless one has side effects. If both strings have constant lengths,
4400 else if (TREE_SIDE_EFFECTS (len1))
4402 else if (TREE_SIDE_EFFECTS (len2))
4404 else if (TREE_CODE (len1) != INTEGER_CST)
4406 else if (TREE_CODE (len2) != INTEGER_CST)
4408 else if (tree_int_cst_lt (len1, len2))
4413 /* If both arguments have side effects, we cannot optimize. */
4414 if (!len || TREE_SIDE_EFFECTS (len))
4417 /* The actual new length parameter is MIN(len,arg3). */
4418 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4419 fold_convert (TREE_TYPE (len), arg3));
4421 /* If we don't have POINTER_TYPE, call the function. */
4422 if (arg1_align == 0 || arg2_align == 0)
4425 /* Make a place to write the result of the instruction. */
4428 && REG_P (result) && GET_MODE (result) == insn_mode
4429 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4430 result = gen_reg_rtx (insn_mode);
4432 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4433 arg1 = builtin_save_expr (arg1);
4434 arg2 = builtin_save_expr (arg2);
4435 len = builtin_save_expr (len);
4437 arg1_rtx = get_memory_rtx (arg1, len);
4438 arg2_rtx = get_memory_rtx (arg2, len);
4439 arg3_rtx = expand_normal (len);
4440 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4441 GEN_INT (MIN (arg1_align, arg2_align)));
4446 /* Return the value in the proper mode for this function. */
4447 mode = TYPE_MODE (TREE_TYPE (exp));
4448 if (GET_MODE (result) == mode)
4451 return convert_to_mode (mode, result, 0);
4452 convert_move (target, result, 0);
4456 /* Expand the library call ourselves using a stabilized argument
4457 list to avoid re-evaluating the function's arguments twice. */
4458 fndecl = get_callee_fndecl (exp);
4459 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4460 if (TREE_CODE (fn) == CALL_EXPR)
4461 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4462 return expand_call (fn, target, target == const0_rtx);
4468 /* Expand expression EXP, which is a call to the strcat builtin.
4469 Return NULL_RTX if we failed the caller should emit a normal call,
4470 otherwise try to get the result in TARGET, if convenient. */
4473 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4475 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4479 tree dst = CALL_EXPR_ARG (exp, 0);
4480 tree src = CALL_EXPR_ARG (exp, 1);
4481 const char *p = c_getstr (src);
4483 /* If the string length is zero, return the dst parameter. */
4484 if (p && *p == '\0')
4485 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4489 /* See if we can store by pieces into (dst + strlen(dst)). */
4490 tree newsrc, newdst,
4491 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4494 /* Stabilize the argument list. */
4495 newsrc = builtin_save_expr (src);
4496 dst = builtin_save_expr (dst);
4500 /* Create strlen (dst). */
4501 newdst = build_call_expr (strlen_fn, 1, dst);
4502 /* Create (dst p+ strlen (dst)). */
4504 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4505 newdst = builtin_save_expr (newdst);
4507 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4509 end_sequence (); /* Stop sequence. */
4513 /* Output the entire sequence. */
4514 insns = get_insns ();
4518 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4525 /* Expand expression EXP, which is a call to the strncat builtin.
4526 Return NULL_RTX if we failed the caller should emit a normal call,
4527 otherwise try to get the result in TARGET, if convenient. */
4530 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4532 if (validate_arglist (exp,
4533 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4535 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4536 CALL_EXPR_ARG (exp, 1),
4537 CALL_EXPR_ARG (exp, 2));
4539 return expand_expr (result, target, mode, EXPAND_NORMAL);
4544 /* Expand expression EXP, which is a call to the strspn builtin.
4545 Return NULL_RTX if we failed the caller should emit a normal call,
4546 otherwise try to get the result in TARGET, if convenient. */
4549 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4551 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4553 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4554 CALL_EXPR_ARG (exp, 1));
4556 return expand_expr (result, target, mode, EXPAND_NORMAL);
4561 /* Expand expression EXP, which is a call to the strcspn builtin.
4562 Return NULL_RTX if we failed the caller should emit a normal call,
4563 otherwise try to get the result in TARGET, if convenient. */
4566 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4568 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4570 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4571 CALL_EXPR_ARG (exp, 1));
4573 return expand_expr (result, target, mode, EXPAND_NORMAL);
4578 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4579 if that's convenient. */
4582 expand_builtin_saveregs (void)
4586 /* Don't do __builtin_saveregs more than once in a function.
4587 Save the result of the first call and reuse it. */
4588 if (saveregs_value != 0)
4589 return saveregs_value;
4591 /* When this function is called, it means that registers must be
4592 saved on entry to this function. So we migrate the call to the
4593 first insn of this function. */
4597 /* Do whatever the machine needs done in this case. */
4598 val = targetm.calls.expand_builtin_saveregs ();
4603 saveregs_value = val;
4605 /* Put the insns after the NOTE that starts the function. If this
4606 is inside a start_sequence, make the outer-level insn chain current, so
4607 the code is placed at the start of the function. */
4608 push_topmost_sequence ();
4609 emit_insn_after (seq, entry_of_function ());
4610 pop_topmost_sequence ();
4615 /* __builtin_args_info (N) returns word N of the arg space info
4616 for the current function. The number and meanings of words
4617 is controlled by the definition of CUMULATIVE_ARGS. */
4620 expand_builtin_args_info (tree exp)
4622 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4623 int *word_ptr = (int *) ¤t_function_args_info;
4625 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4627 if (call_expr_nargs (exp) != 0)
4629 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4630 error ("argument of %<__builtin_args_info%> must be constant");
4633 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4635 if (wordnum < 0 || wordnum >= nwords)
4636 error ("argument of %<__builtin_args_info%> out of range");
4638 return GEN_INT (word_ptr[wordnum]);
4642 error ("missing argument in %<__builtin_args_info%>");
4647 /* Expand a call to __builtin_next_arg. */
4650 expand_builtin_next_arg (void)
4652 /* Checking arguments is already done in fold_builtin_next_arg
4653 that must be called before this function. */
4654 return expand_binop (ptr_mode, add_optab,
4655 current_function_internal_arg_pointer,
4656 current_function_arg_offset_rtx,
4657 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4660 /* Make it easier for the backends by protecting the valist argument
4661 from multiple evaluations. */
4664 stabilize_va_list (tree valist, int needs_lvalue)
4666 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4668 if (TREE_SIDE_EFFECTS (valist))
4669 valist = save_expr (valist);
4671 /* For this case, the backends will be expecting a pointer to
4672 TREE_TYPE (va_list_type_node), but it's possible we've
4673 actually been given an array (an actual va_list_type_node).
4675 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4677 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4678 valist = build_fold_addr_expr_with_type (valist, p1);
4687 if (! TREE_SIDE_EFFECTS (valist))
4690 pt = build_pointer_type (va_list_type_node);
4691 valist = fold_build1 (ADDR_EXPR, pt, valist);
4692 TREE_SIDE_EFFECTS (valist) = 1;
4695 if (TREE_SIDE_EFFECTS (valist))
4696 valist = save_expr (valist);
4697 valist = build_fold_indirect_ref (valist);
4703 /* The "standard" definition of va_list is void*. */
4706 std_build_builtin_va_list (void)
4708 return ptr_type_node;
4711 /* The "standard" implementation of va_start: just assign `nextarg' to
4715 std_expand_builtin_va_start (tree valist, rtx nextarg)
4717 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4718 convert_move (va_r, nextarg, 0);
4721 /* Expand EXP, a call to __builtin_va_start. */
4724 expand_builtin_va_start (tree exp)
4729 if (call_expr_nargs (exp) < 2)
4731 error ("too few arguments to function %<va_start%>");
4735 if (fold_builtin_next_arg (exp, true))
4738 nextarg = expand_builtin_next_arg ();
4739 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4741 #ifdef EXPAND_BUILTIN_VA_START
4742 EXPAND_BUILTIN_VA_START (valist, nextarg);
4744 std_expand_builtin_va_start (valist, nextarg);
4750 /* The "standard" implementation of va_arg: read the value from the
4751 current (padded) address and increment by the (padded) size. */
4754 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4756 tree addr, t, type_size, rounded_size, valist_tmp;
4757 unsigned HOST_WIDE_INT align, boundary;
4760 #ifdef ARGS_GROW_DOWNWARD
4761 /* All of the alignment and movement below is for args-grow-up machines.
4762 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4763 implement their own specialized gimplify_va_arg_expr routines. */
4767 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4769 type = build_pointer_type (type);
4771 align = PARM_BOUNDARY / BITS_PER_UNIT;
4772 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4774 /* Hoist the valist value into a temporary for the moment. */
4775 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4777 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4778 requires greater alignment, we must perform dynamic alignment. */
4779 if (boundary > align
4780 && !integer_zerop (TYPE_SIZE (type)))
4782 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4783 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4784 valist_tmp, size_int (boundary - 1)));
4785 gimplify_and_add (t, pre_p);
4787 t = fold_convert (sizetype, valist_tmp);
4788 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4789 fold_convert (TREE_TYPE (valist),
4790 fold_build2 (BIT_AND_EXPR, sizetype, t,
4791 size_int (-boundary))));
4792 gimplify_and_add (t, pre_p);
4797 /* If the actual alignment is less than the alignment of the type,
4798 adjust the type accordingly so that we don't assume strict alignment
4799 when deferencing the pointer. */
4800 boundary *= BITS_PER_UNIT;
4801 if (boundary < TYPE_ALIGN (type))
4803 type = build_variant_type_copy (type);
4804 TYPE_ALIGN (type) = boundary;
4807 /* Compute the rounded size of the type. */
4808 type_size = size_in_bytes (type);
4809 rounded_size = round_up (type_size, align);
4811 /* Reduce rounded_size so it's sharable with the postqueue. */
4812 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4816 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4818 /* Small args are padded downward. */
4819 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4820 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4821 size_binop (MINUS_EXPR, rounded_size, type_size));
4822 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4825 /* Compute new value for AP. */
4826 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4827 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4828 gimplify_and_add (t, pre_p);
4830 addr = fold_convert (build_pointer_type (type), addr);
4833 addr = build_va_arg_indirect_ref (addr);
4835 return build_va_arg_indirect_ref (addr);
4838 /* Build an indirect-ref expression over the given TREE, which represents a
4839 piece of a va_arg() expansion. */
4841 build_va_arg_indirect_ref (tree addr)
4843 addr = build_fold_indirect_ref (addr);
4845 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4851 /* Return a dummy expression of type TYPE in order to keep going after an
4855 dummy_object (tree type)
4857 tree t = build_int_cst (build_pointer_type (type), 0);
4858 return build1 (INDIRECT_REF, type, t);
4861 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4862 builtin function, but a very special sort of operator. */
4864 enum gimplify_status
4865 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4867 tree promoted_type, want_va_type, have_va_type;
4868 tree valist = TREE_OPERAND (*expr_p, 0);
4869 tree type = TREE_TYPE (*expr_p);
4872 /* Verify that valist is of the proper type. */
4873 want_va_type = va_list_type_node;
4874 have_va_type = TREE_TYPE (valist);
4876 if (have_va_type == error_mark_node)
4879 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4881 /* If va_list is an array type, the argument may have decayed
4882 to a pointer type, e.g. by being passed to another function.
4883 In that case, unwrap both types so that we can compare the
4884 underlying records. */
4885 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4886 || POINTER_TYPE_P (have_va_type))
4888 want_va_type = TREE_TYPE (want_va_type);
4889 have_va_type = TREE_TYPE (have_va_type);
4893 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4895 error ("first argument to %<va_arg%> not of type %<va_list%>");
4899 /* Generate a diagnostic for requesting data of a type that cannot
4900 be passed through `...' due to type promotion at the call site. */
4901 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4904 static bool gave_help;
4906 /* Unfortunately, this is merely undefined, rather than a constraint
4907 violation, so we cannot make this an error. If this call is never
4908 executed, the program is still strictly conforming. */
4909 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4910 type, promoted_type);
4914 warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
4915 promoted_type, type);
4918 /* We can, however, treat "undefined" any way we please.
4919 Call abort to encourage the user to fix the program. */
4920 inform ("if this code is reached, the program will abort");
4921 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4922 append_to_statement_list (t, pre_p);
4924 /* This is dead code, but go ahead and finish so that the
4925 mode of the result comes out right. */
4926 *expr_p = dummy_object (type);
4931 /* Make it easier for the backends by protecting the valist argument
4932 from multiple evaluations. */
4933 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4935 /* For this case, the backends will be expecting a pointer to
4936 TREE_TYPE (va_list_type_node), but it's possible we've
4937 actually been given an array (an actual va_list_type_node).
4939 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4941 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4942 valist = build_fold_addr_expr_with_type (valist, p1);
4944 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4947 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4949 if (!targetm.gimplify_va_arg_expr)
4950 /* FIXME:Once most targets are converted we should merely
4951 assert this is non-null. */
4954 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4959 /* Expand EXP, a call to __builtin_va_end. */
4962 expand_builtin_va_end (tree exp)
4964 tree valist = CALL_EXPR_ARG (exp, 0);
4966 /* Evaluate for side effects, if needed. I hate macros that don't
4968 if (TREE_SIDE_EFFECTS (valist))
4969 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4974 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4975 builtin rather than just as an assignment in stdarg.h because of the
4976 nastiness of array-type va_list types. */
4979 expand_builtin_va_copy (tree exp)
4983 dst = CALL_EXPR_ARG (exp, 0);
4984 src = CALL_EXPR_ARG (exp, 1);
4986 dst = stabilize_va_list (dst, 1);
4987 src = stabilize_va_list (src, 0);
4989 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4991 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4992 TREE_SIDE_EFFECTS (t) = 1;
4993 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4997 rtx dstb, srcb, size;
4999 /* Evaluate to pointers. */
5000 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5001 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5002 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
5003 VOIDmode, EXPAND_NORMAL);
5005 dstb = convert_memory_address (Pmode, dstb);
5006 srcb = convert_memory_address (Pmode, srcb);
5008 /* "Dereference" to BLKmode memories. */
5009 dstb = gen_rtx_MEM (BLKmode, dstb);
5010 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5011 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
5012 srcb = gen_rtx_MEM (BLKmode, srcb);
5013 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5014 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
5017 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5023 /* Expand a call to one of the builtin functions __builtin_frame_address or
5024 __builtin_return_address. */
5027 expand_builtin_frame_address (tree fndecl, tree exp)
5029 /* The argument must be a nonnegative integer constant.
5030 It counts the number of frames to scan up the stack.
5031 The value is the return address saved in that frame. */
5032 if (call_expr_nargs (exp) == 0)
5033 /* Warning about missing arg was already issued. */
5035 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5037 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5038 error ("invalid argument to %<__builtin_frame_address%>");
5040 error ("invalid argument to %<__builtin_return_address%>");
5046 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5047 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5049 /* Some ports cannot access arbitrary stack frames. */
5052 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5053 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5055 warning (0, "unsupported argument to %<__builtin_return_address%>");
5059 /* For __builtin_frame_address, return what we've got. */
5060 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5064 && ! CONSTANT_P (tem))
5065 tem = copy_to_mode_reg (Pmode, tem);
5070 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5071 we failed and the caller should emit a normal call, otherwise try to get
5072 the result in TARGET, if convenient. */
5075 expand_builtin_alloca (tree exp, rtx target)
5080 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5081 should always expand to function calls. These can be intercepted
5086 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5089 /* Compute the argument. */
5090 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5092 /* Allocate the desired space. */
5093 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5094 result = convert_memory_address (ptr_mode, result);
5099 /* Expand a call to a bswap builtin with argument ARG0. MODE
5100 is the mode to expand with. */
5103 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5105 enum machine_mode mode;
5109 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5112 arg = CALL_EXPR_ARG (exp, 0);
5113 mode = TYPE_MODE (TREE_TYPE (arg));
5114 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5116 target = expand_unop (mode, bswap_optab, op0, target, 1);
5118 gcc_assert (target);
5120 return convert_to_mode (mode, target, 0);
5123 /* Expand a call to a unary builtin in EXP.
5124 Return NULL_RTX if a normal call should be emitted rather than expanding the
5125 function in-line. If convenient, the result should be placed in TARGET.
5126 SUBTARGET may be used as the target for computing one of EXP's operands. */
5129 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5130 rtx subtarget, optab op_optab)
5134 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5137 /* Compute the argument. */
5138 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5139 VOIDmode, EXPAND_NORMAL);
5140 /* Compute op, into TARGET if possible.
5141 Set TARGET to wherever the result comes back. */
5142 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5143 op_optab, op0, target, 1);
5144 gcc_assert (target);
5146 return convert_to_mode (target_mode, target, 0);
5149 /* If the string passed to fputs is a constant and is one character
5150 long, we attempt to transform this call into __builtin_fputc(). */
5153 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5155 /* Verify the arguments in the original call. */
5156 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5158 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5159 CALL_EXPR_ARG (exp, 1),
5160 (target == const0_rtx),
5161 unlocked, NULL_TREE);
5163 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5168 /* Expand a call to __builtin_expect. We just return our argument
5169 as the builtin_expect semantic should've been already executed by
5170 tree branch prediction pass. */
5173 expand_builtin_expect (tree exp, rtx target)
5177 if (call_expr_nargs (exp) < 2)
5179 arg = CALL_EXPR_ARG (exp, 0);
5180 c = CALL_EXPR_ARG (exp, 1);
5182 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5183 /* When guessing was done, the hints should be already stripped away. */
5184 gcc_assert (!flag_guess_branch_prob
5185 || optimize == 0 || errorcount || sorrycount);
5190 expand_builtin_trap (void)
5194 emit_insn (gen_trap ());
5197 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5201 /* Expand EXP, a call to fabs, fabsf or fabsl.
5202 Return NULL_RTX if a normal call should be emitted rather than expanding
5203 the function inline. If convenient, the result should be placed
5204 in TARGET. SUBTARGET may be used as the target for computing
5208 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5210 enum machine_mode mode;
5214 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5217 arg = CALL_EXPR_ARG (exp, 0);
5218 mode = TYPE_MODE (TREE_TYPE (arg));
5219 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5220 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5223 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5224 Return NULL is a normal call should be emitted rather than expanding the
5225 function inline. If convenient, the result should be placed in TARGET.
5226 SUBTARGET may be used as the target for computing the operand. */
5229 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5234 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5237 arg = CALL_EXPR_ARG (exp, 0);
5238 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5240 arg = CALL_EXPR_ARG (exp, 1);
5241 op1 = expand_normal (arg);
5243 return expand_copysign (op0, op1, target);
5246 /* Create a new constant string literal and return a char* pointer to it.
5247 The STRING_CST value is the LEN characters at STR. */
5249 build_string_literal (int len, const char *str)
5251 tree t, elem, index, type;
5253 t = build_string (len, str);
5254 elem = build_type_variant (char_type_node, 1, 0);
5255 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5256 type = build_array_type (elem, index);
5257 TREE_TYPE (t) = type;
5258 TREE_CONSTANT (t) = 1;
5259 TREE_INVARIANT (t) = 1;
5260 TREE_READONLY (t) = 1;
5261 TREE_STATIC (t) = 1;
5263 type = build_pointer_type (type);
5264 t = build1 (ADDR_EXPR, type, t);
5266 type = build_pointer_type (elem);
5267 t = build1 (NOP_EXPR, type, t);
5271 /* Expand EXP, a call to printf or printf_unlocked.
5272 Return NULL_RTX if a normal call should be emitted rather than transforming
5273 the function inline. If convenient, the result should be placed in
5274 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5277 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5280 /* If we're using an unlocked function, assume the other unlocked
5281 functions exist explicitly. */
5282 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5283 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5284 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5285 : implicit_built_in_decls[BUILT_IN_PUTS];
5286 const char *fmt_str;
5289 int nargs = call_expr_nargs (exp);
5291 /* If the return value is used, don't do the transformation. */
5292 if (target != const0_rtx)
5295 /* Verify the required arguments in the original call. */
5298 fmt = CALL_EXPR_ARG (exp, 0);
5299 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5302 /* Check whether the format is a literal string constant. */
5303 fmt_str = c_getstr (fmt);
5304 if (fmt_str == NULL)
5307 if (!init_target_chars ())
5310 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5311 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5314 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5317 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5319 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5320 else if (strcmp (fmt_str, target_percent_c) == 0)
5323 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5326 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5330 /* We can't handle anything else with % args or %% ... yet. */
5331 if (strchr (fmt_str, target_percent))
5337 /* If the format specifier was "", printf does nothing. */
5338 if (fmt_str[0] == '\0')
5340 /* If the format specifier has length of 1, call putchar. */
5341 if (fmt_str[1] == '\0')
5343 /* Given printf("c"), (where c is any one character,)
5344 convert "c"[0] to an int and pass that to the replacement
5346 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5348 fn = build_call_expr (fn_putchar, 1, arg);
5352 /* If the format specifier was "string\n", call puts("string"). */
5353 size_t len = strlen (fmt_str);
5354 if ((unsigned char)fmt_str[len - 1] == target_newline)
5356 /* Create a NUL-terminated string that's one char shorter
5357 than the original, stripping off the trailing '\n'. */
5358 char *newstr = alloca (len);
5359 memcpy (newstr, fmt_str, len - 1);
5360 newstr[len - 1] = 0;
5361 arg = build_string_literal (len, newstr);
5363 fn = build_call_expr (fn_puts, 1, arg);
5366 /* We'd like to arrange to call fputs(string,stdout) here,
5367 but we need stdout and don't have a way to get it yet. */
5374 if (TREE_CODE (fn) == CALL_EXPR)
5375 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5376 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5379 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5380 Return NULL_RTX if a normal call should be emitted rather than transforming
5381 the function inline. If convenient, the result should be placed in
5382 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5385 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5388 /* If we're using an unlocked function, assume the other unlocked
5389 functions exist explicitly. */
5390 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5391 : implicit_built_in_decls[BUILT_IN_FPUTC];
5392 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5393 : implicit_built_in_decls[BUILT_IN_FPUTS];
5394 const char *fmt_str;
5397 int nargs = call_expr_nargs (exp);
5399 /* If the return value is used, don't do the transformation. */
5400 if (target != const0_rtx)
5403 /* Verify the required arguments in the original call. */
5406 fp = CALL_EXPR_ARG (exp, 0);
5407 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5409 fmt = CALL_EXPR_ARG (exp, 1);
5410 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5413 /* Check whether the format is a literal string constant. */
5414 fmt_str = c_getstr (fmt);
5415 if (fmt_str == NULL)
5418 if (!init_target_chars ())
5421 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5422 if (strcmp (fmt_str, target_percent_s) == 0)
5425 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5427 arg = CALL_EXPR_ARG (exp, 2);
5429 fn = build_call_expr (fn_fputs, 2, arg, fp);
5431 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5432 else if (strcmp (fmt_str, target_percent_c) == 0)
5435 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5437 arg = CALL_EXPR_ARG (exp, 2);
5439 fn = build_call_expr (fn_fputc, 2, arg, fp);
5443 /* We can't handle anything else with % args or %% ... yet. */
5444 if (strchr (fmt_str, target_percent))
5450 /* If the format specifier was "", fprintf does nothing. */
5451 if (fmt_str[0] == '\0')
5453 /* Evaluate and ignore FILE* argument for side-effects. */
5454 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5458 /* When "string" doesn't contain %, replace all cases of
5459 fprintf(stream,string) with fputs(string,stream). The fputs
5460 builtin will take care of special cases like length == 1. */
5462 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5467 if (TREE_CODE (fn) == CALL_EXPR)
5468 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5469 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5472 /* Expand a call EXP to sprintf. Return NULL_RTX if
5473 a normal call should be emitted rather than expanding the function
5474 inline. If convenient, the result should be placed in TARGET with
5478 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5481 const char *fmt_str;
5482 int nargs = call_expr_nargs (exp);
5484 /* Verify the required arguments in the original call. */
5487 dest = CALL_EXPR_ARG (exp, 0);
5488 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5490 fmt = CALL_EXPR_ARG (exp, 0);
5491 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5494 /* Check whether the format is a literal string constant. */
5495 fmt_str = c_getstr (fmt);
5496 if (fmt_str == NULL)
5499 if (!init_target_chars ())
5502 /* If the format doesn't contain % args or %%, use strcpy. */
5503 if (strchr (fmt_str, target_percent) == 0)
5505 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5508 if ((nargs > 2) || ! fn)
5510 expand_expr (build_call_expr (fn, 2, dest, fmt),
5511 const0_rtx, VOIDmode, EXPAND_NORMAL);
5512 if (target == const0_rtx)
5514 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5515 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5517 /* If the format is "%s", use strcpy if the result isn't used. */
5518 else if (strcmp (fmt_str, target_percent_s) == 0)
5521 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5527 arg = CALL_EXPR_ARG (exp, 2);
5528 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5531 if (target != const0_rtx)
5533 len = c_strlen (arg, 1);
5534 if (! len || TREE_CODE (len) != INTEGER_CST)
5540 expand_expr (build_call_expr (fn, 2, dest, arg),
5541 const0_rtx, VOIDmode, EXPAND_NORMAL);
5543 if (target == const0_rtx)
5545 return expand_expr (len, target, mode, EXPAND_NORMAL);
5551 /* Expand a call to either the entry or exit function profiler. */
5554 expand_builtin_profile_func (bool exitp)
5558 this = DECL_RTL (current_function_decl);
5559 gcc_assert (MEM_P (this));
5560 this = XEXP (this, 0);
5563 which = profile_function_exit_libfunc;
5565 which = profile_function_entry_libfunc;
5567 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5568 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5575 /* Expand a call to __builtin___clear_cache. */
5578 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5580 #ifndef HAVE_clear_cache
5581 #ifdef CLEAR_INSN_CACHE
5582 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5583 does something. Just do the default expansion to a call to
5587 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5588 does nothing. There is no need to call it. Do nothing. */
5590 #endif /* CLEAR_INSN_CACHE */
5592 /* We have a "clear_cache" insn, and it will handle everything. */
5594 rtx begin_rtx, end_rtx;
5595 enum insn_code icode;
5597 /* We must not expand to a library call. If we did, any
5598 fallback library function in libgcc that might contain a call to
5599 __builtin___clear_cache() would recurse infinitely. */
5600 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5602 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5606 if (HAVE_clear_cache)
5608 icode = CODE_FOR_clear_cache;
5610 begin = CALL_EXPR_ARG (exp, 0);
5611 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5612 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5613 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5614 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5616 end = CALL_EXPR_ARG (exp, 1);
5617 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5618 end_rtx = convert_memory_address (Pmode, end_rtx);
5619 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5620 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5622 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5625 #endif /* HAVE_clear_cache */
5628 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5631 round_trampoline_addr (rtx tramp)
5633 rtx temp, addend, mask;
5635 /* If we don't need too much alignment, we'll have been guaranteed
5636 proper alignment by get_trampoline_type. */
5637 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5640 /* Round address up to desired boundary. */
5641 temp = gen_reg_rtx (Pmode);
5642 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5643 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5645 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5646 temp, 0, OPTAB_LIB_WIDEN);
5647 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5648 temp, 0, OPTAB_LIB_WIDEN);
5654 expand_builtin_init_trampoline (tree exp)
5656 tree t_tramp, t_func, t_chain;
5657 rtx r_tramp, r_func, r_chain;
5658 #ifdef TRAMPOLINE_TEMPLATE
5662 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5663 POINTER_TYPE, VOID_TYPE))
5666 t_tramp = CALL_EXPR_ARG (exp, 0);
5667 t_func = CALL_EXPR_ARG (exp, 1);
5668 t_chain = CALL_EXPR_ARG (exp, 2);
5670 r_tramp = expand_normal (t_tramp);
5671 r_func = expand_normal (t_func);
5672 r_chain = expand_normal (t_chain);
5674 /* Generate insns to initialize the trampoline. */
5675 r_tramp = round_trampoline_addr (r_tramp);
5676 #ifdef TRAMPOLINE_TEMPLATE
5677 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5678 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5679 emit_block_move (blktramp, assemble_trampoline_template (),
5680 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5682 trampolines_created = 1;
5683 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5689 expand_builtin_adjust_trampoline (tree exp)
5693 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5696 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5697 tramp = round_trampoline_addr (tramp);
5698 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5699 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5705 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5706 function. The function first checks whether the back end provides
5707 an insn to implement signbit for the respective mode. If not, it
5708 checks whether the floating point format of the value is such that
5709 the sign bit can be extracted. If that is not the case, the
5710 function returns NULL_RTX to indicate that a normal call should be
5711 emitted rather than expanding the function in-line. EXP is the
5712 expression that is a call to the builtin function; if convenient,
5713 the result should be placed in TARGET. */
5715 expand_builtin_signbit (tree exp, rtx target)
5717 const struct real_format *fmt;
5718 enum machine_mode fmode, imode, rmode;
5719 HOST_WIDE_INT hi, lo;
5722 enum insn_code icode;
5725 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5728 arg = CALL_EXPR_ARG (exp, 0);
5729 fmode = TYPE_MODE (TREE_TYPE (arg));
5730 rmode = TYPE_MODE (TREE_TYPE (exp));
5731 fmt = REAL_MODE_FORMAT (fmode);
5733 arg = builtin_save_expr (arg);
5735 /* Expand the argument yielding a RTX expression. */
5736 temp = expand_normal (arg);
5738 /* Check if the back end provides an insn that handles signbit for the
5740 icode = signbit_optab->handlers [(int) fmode].insn_code;
5741 if (icode != CODE_FOR_nothing)
5743 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5744 emit_unop_insn (icode, target, temp, UNKNOWN);
5748 /* For floating point formats without a sign bit, implement signbit
5750 bitpos = fmt->signbit_ro;
5753 /* But we can't do this if the format supports signed zero. */
5754 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5757 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5758 build_real (TREE_TYPE (arg), dconst0));
5759 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5762 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5764 imode = int_mode_for_mode (fmode);
5765 if (imode == BLKmode)
5767 temp = gen_lowpart (imode, temp);
5772 /* Handle targets with different FP word orders. */
5773 if (FLOAT_WORDS_BIG_ENDIAN)
5774 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5776 word = bitpos / BITS_PER_WORD;
5777 temp = operand_subword_force (temp, word, fmode);
5778 bitpos = bitpos % BITS_PER_WORD;
5781 /* Force the intermediate word_mode (or narrower) result into a
5782 register. This avoids attempting to create paradoxical SUBREGs
5783 of floating point modes below. */
5784 temp = force_reg (imode, temp);
5786 /* If the bitpos is within the "result mode" lowpart, the operation
5787 can be implement with a single bitwise AND. Otherwise, we need
5788 a right shift and an AND. */
5790 if (bitpos < GET_MODE_BITSIZE (rmode))
5792 if (bitpos < HOST_BITS_PER_WIDE_INT)
5795 lo = (HOST_WIDE_INT) 1 << bitpos;
5799 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5804 temp = gen_lowpart (rmode, temp);
5805 temp = expand_binop (rmode, and_optab, temp,
5806 immed_double_const (lo, hi, rmode),
5807 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5811 /* Perform a logical right shift to place the signbit in the least
5812 significant bit, then truncate the result to the desired mode
5813 and mask just this bit. */
5814 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5815 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5816 temp = gen_lowpart (rmode, temp);
5817 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5818 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5824 /* Expand fork or exec calls. TARGET is the desired target of the
5825 call. EXP is the call. FN is the
5826 identificator of the actual function. IGNORE is nonzero if the
5827 value is to be ignored. */
5830 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5835 /* If we are not profiling, just call the function. */
5836 if (!profile_arc_flag)
5839 /* Otherwise call the wrapper. This should be equivalent for the rest of
5840 compiler, so the code does not diverge, and the wrapper may run the
5841 code necessary for keeping the profiling sane. */
5843 switch (DECL_FUNCTION_CODE (fn))
5846 id = get_identifier ("__gcov_fork");
5849 case BUILT_IN_EXECL:
5850 id = get_identifier ("__gcov_execl");
5853 case BUILT_IN_EXECV:
5854 id = get_identifier ("__gcov_execv");
5857 case BUILT_IN_EXECLP:
5858 id = get_identifier ("__gcov_execlp");
5861 case BUILT_IN_EXECLE:
5862 id = get_identifier ("__gcov_execle");
5865 case BUILT_IN_EXECVP:
5866 id = get_identifier ("__gcov_execvp");
5869 case BUILT_IN_EXECVE:
5870 id = get_identifier ("__gcov_execve");
5877 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5878 DECL_EXTERNAL (decl) = 1;
5879 TREE_PUBLIC (decl) = 1;
5880 DECL_ARTIFICIAL (decl) = 1;
5881 TREE_NOTHROW (decl) = 1;
5882 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5883 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5884 call = rewrite_call_expr (exp, 0, decl, 0);
5885 return expand_call (call, target, ignore);
5890 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5891 the pointer in these functions is void*, the tree optimizers may remove
5892 casts. The mode computed in expand_builtin isn't reliable either, due
5893 to __sync_bool_compare_and_swap.
5895 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5896 group of builtins. This gives us log2 of the mode size. */
5898 static inline enum machine_mode
5899 get_builtin_sync_mode (int fcode_diff)
5901 /* The size is not negotiable, so ask not to get BLKmode in return
5902 if the target indicates that a smaller size would be better. */
5903 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5906 /* Expand the memory expression LOC and return the appropriate memory operand
5907 for the builtin_sync operations. */
5910 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5914 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5916 /* Note that we explicitly do not want any alias information for this
5917 memory, so that we kill all other live memories. Otherwise we don't
5918 satisfy the full barrier semantics of the intrinsic. */
5919 mem = validize_mem (gen_rtx_MEM (mode, addr));
5921 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5922 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5923 MEM_VOLATILE_P (mem) = 1;
5928 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5929 EXP is the CALL_EXPR. CODE is the rtx code
5930 that corresponds to the arithmetic or logical operation from the name;
5931 an exception here is that NOT actually means NAND. TARGET is an optional
5932 place for us to store the results; AFTER is true if this is the
5933 fetch_and_xxx form. IGNORE is true if we don't actually care about
5934 the result of the operation at all. */
5937 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5938 enum rtx_code code, bool after,
5939 rtx target, bool ignore)
5942 enum machine_mode old_mode;
5944 /* Expand the operands. */
5945 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5947 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5948 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5949 of CONST_INTs, where we know the old_mode only from the call argument. */
5950 old_mode = GET_MODE (val);
5951 if (old_mode == VOIDmode)
5952 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5953 val = convert_modes (mode, old_mode, val, 1);
5956 return expand_sync_operation (mem, val, code);
5958 return expand_sync_fetch_operation (mem, val, code, after, target);
5961 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5962 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5963 true if this is the boolean form. TARGET is a place for us to store the
5964 results; this is NOT optional if IS_BOOL is true. */
5967 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5968 bool is_bool, rtx target)
5970 rtx old_val, new_val, mem;
5971 enum machine_mode old_mode;
5973 /* Expand the operands. */
5974 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5977 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5978 mode, EXPAND_NORMAL);
5979 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5980 of CONST_INTs, where we know the old_mode only from the call argument. */
5981 old_mode = GET_MODE (old_val);
5982 if (old_mode == VOIDmode)
5983 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5984 old_val = convert_modes (mode, old_mode, old_val, 1);
5986 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5987 mode, EXPAND_NORMAL);
5988 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5989 of CONST_INTs, where we know the old_mode only from the call argument. */
5990 old_mode = GET_MODE (new_val);
5991 if (old_mode == VOIDmode)
5992 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5993 new_val = convert_modes (mode, old_mode, new_val, 1);
5996 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5998 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6001 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6002 general form is actually an atomic exchange, and some targets only
6003 support a reduced form with the second argument being a constant 1.
6004 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6008 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6012 enum machine_mode old_mode;
6014 /* Expand the operands. */
6015 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6016 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6017 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6018 of CONST_INTs, where we know the old_mode only from the call argument. */
6019 old_mode = GET_MODE (val);
6020 if (old_mode == VOIDmode)
6021 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6022 val = convert_modes (mode, old_mode, val, 1);
6024 return expand_sync_lock_test_and_set (mem, val, target);
6027 /* Expand the __sync_synchronize intrinsic. */
6030 expand_builtin_synchronize (void)
6034 #ifdef HAVE_memory_barrier
6035 if (HAVE_memory_barrier)
6037 emit_insn (gen_memory_barrier ());
6042 /* If no explicit memory barrier instruction is available, create an
6043 empty asm stmt with a memory clobber. */
6044 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6045 tree_cons (NULL, build_string (6, "memory"), NULL));
6046 ASM_VOLATILE_P (x) = 1;
6047 expand_asm_expr (x);
6050 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6053 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6055 enum insn_code icode;
6057 rtx val = const0_rtx;
6059 /* Expand the operands. */
6060 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6062 /* If there is an explicit operation in the md file, use it. */
6063 icode = sync_lock_release[mode];
6064 if (icode != CODE_FOR_nothing)
6066 if (!insn_data[icode].operand[1].predicate (val, mode))
6067 val = force_reg (mode, val);
6069 insn = GEN_FCN (icode) (mem, val);
6077 /* Otherwise we can implement this operation by emitting a barrier
6078 followed by a store of zero. */
6079 expand_builtin_synchronize ();
6080 emit_move_insn (mem, val);
6083 /* Expand an expression EXP that calls a built-in function,
6084 with result going to TARGET if that's convenient
6085 (and in mode MODE if that's convenient).
6086 SUBTARGET may be used as the target for computing one of EXP's operands.
6087 IGNORE is nonzero if the value is to be ignored. */
6090 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6093 tree fndecl = get_callee_fndecl (exp);
6094 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6095 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6097 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6098 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6100 /* When not optimizing, generate calls to library functions for a certain
6103 && !called_as_built_in (fndecl)
6104 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6105 && fcode != BUILT_IN_ALLOCA)
6106 return expand_call (exp, target, ignore);
6108 /* The built-in function expanders test for target == const0_rtx
6109 to determine whether the function's result will be ignored. */
6111 target = const0_rtx;
6113 /* If the result of a pure or const built-in function is ignored, and
6114 none of its arguments are volatile, we can avoid expanding the
6115 built-in call and just evaluate the arguments for side-effects. */
6116 if (target == const0_rtx
6117 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
6119 bool volatilep = false;
6121 call_expr_arg_iterator iter;
6123 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6124 if (TREE_THIS_VOLATILE (arg))
6132 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6133 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6140 CASE_FLT_FN (BUILT_IN_FABS):
6141 target = expand_builtin_fabs (exp, target, subtarget);
6146 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6147 target = expand_builtin_copysign (exp, target, subtarget);
6152 /* Just do a normal library call if we were unable to fold
6154 CASE_FLT_FN (BUILT_IN_CABS):
6157 CASE_FLT_FN (BUILT_IN_EXP):
6158 CASE_FLT_FN (BUILT_IN_EXP10):
6159 CASE_FLT_FN (BUILT_IN_POW10):
6160 CASE_FLT_FN (BUILT_IN_EXP2):
6161 CASE_FLT_FN (BUILT_IN_EXPM1):
6162 CASE_FLT_FN (BUILT_IN_LOGB):
6163 CASE_FLT_FN (BUILT_IN_LOG):
6164 CASE_FLT_FN (BUILT_IN_LOG10):
6165 CASE_FLT_FN (BUILT_IN_LOG2):
6166 CASE_FLT_FN (BUILT_IN_LOG1P):
6167 CASE_FLT_FN (BUILT_IN_TAN):
6168 CASE_FLT_FN (BUILT_IN_ASIN):
6169 CASE_FLT_FN (BUILT_IN_ACOS):
6170 CASE_FLT_FN (BUILT_IN_ATAN):
6171 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6172 because of possible accuracy problems. */
6173 if (! flag_unsafe_math_optimizations)
6175 CASE_FLT_FN (BUILT_IN_SQRT):
6176 CASE_FLT_FN (BUILT_IN_FLOOR):
6177 CASE_FLT_FN (BUILT_IN_CEIL):
6178 CASE_FLT_FN (BUILT_IN_TRUNC):
6179 CASE_FLT_FN (BUILT_IN_ROUND):
6180 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6181 CASE_FLT_FN (BUILT_IN_RINT):
6182 target = expand_builtin_mathfn (exp, target, subtarget);
6187 CASE_FLT_FN (BUILT_IN_ILOGB):
6188 if (! flag_unsafe_math_optimizations)
6190 CASE_FLT_FN (BUILT_IN_ISINF):
6191 CASE_FLT_FN (BUILT_IN_FINITE):
6192 case BUILT_IN_ISFINITE:
6193 case BUILT_IN_ISNORMAL:
6194 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6199 CASE_FLT_FN (BUILT_IN_LCEIL):
6200 CASE_FLT_FN (BUILT_IN_LLCEIL):
6201 CASE_FLT_FN (BUILT_IN_LFLOOR):
6202 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6203 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6208 CASE_FLT_FN (BUILT_IN_LRINT):
6209 CASE_FLT_FN (BUILT_IN_LLRINT):
6210 CASE_FLT_FN (BUILT_IN_LROUND):
6211 CASE_FLT_FN (BUILT_IN_LLROUND):
6212 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6217 CASE_FLT_FN (BUILT_IN_POW):
6218 target = expand_builtin_pow (exp, target, subtarget);
6223 CASE_FLT_FN (BUILT_IN_POWI):
6224 target = expand_builtin_powi (exp, target, subtarget);
6229 CASE_FLT_FN (BUILT_IN_ATAN2):
6230 CASE_FLT_FN (BUILT_IN_LDEXP):
6231 CASE_FLT_FN (BUILT_IN_SCALB):
6232 CASE_FLT_FN (BUILT_IN_SCALBN):
6233 CASE_FLT_FN (BUILT_IN_SCALBLN):
6234 if (! flag_unsafe_math_optimizations)
6237 CASE_FLT_FN (BUILT_IN_FMOD):
6238 CASE_FLT_FN (BUILT_IN_REMAINDER):
6239 CASE_FLT_FN (BUILT_IN_DREM):
6240 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6245 CASE_FLT_FN (BUILT_IN_CEXPI):
6246 target = expand_builtin_cexpi (exp, target, subtarget);
6247 gcc_assert (target);
6250 CASE_FLT_FN (BUILT_IN_SIN):
6251 CASE_FLT_FN (BUILT_IN_COS):
6252 if (! flag_unsafe_math_optimizations)
6254 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6259 CASE_FLT_FN (BUILT_IN_SINCOS):
6260 if (! flag_unsafe_math_optimizations)
6262 target = expand_builtin_sincos (exp);
6267 case BUILT_IN_APPLY_ARGS:
6268 return expand_builtin_apply_args ();
6270 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6271 FUNCTION with a copy of the parameters described by
6272 ARGUMENTS, and ARGSIZE. It returns a block of memory
6273 allocated on the stack into which is stored all the registers
6274 that might possibly be used for returning the result of a
6275 function. ARGUMENTS is the value returned by
6276 __builtin_apply_args. ARGSIZE is the number of bytes of
6277 arguments that must be copied. ??? How should this value be
6278 computed? We'll also need a safe worst case value for varargs
6280 case BUILT_IN_APPLY:
6281 if (!validate_arglist (exp, POINTER_TYPE,
6282 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6283 && !validate_arglist (exp, REFERENCE_TYPE,
6284 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6290 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6291 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6292 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6294 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6297 /* __builtin_return (RESULT) causes the function to return the
6298 value described by RESULT. RESULT is address of the block of
6299 memory returned by __builtin_apply. */
6300 case BUILT_IN_RETURN:
6301 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6302 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6305 case BUILT_IN_SAVEREGS:
6306 return expand_builtin_saveregs ();
6308 case BUILT_IN_ARGS_INFO:
6309 return expand_builtin_args_info (exp);
6311 /* Return the address of the first anonymous stack arg. */
6312 case BUILT_IN_NEXT_ARG:
6313 if (fold_builtin_next_arg (exp, false))
6315 return expand_builtin_next_arg ();
6317 case BUILT_IN_CLEAR_CACHE:
6318 target = expand_builtin___clear_cache (exp);
6323 case BUILT_IN_CLASSIFY_TYPE:
6324 return expand_builtin_classify_type (exp);
6326 case BUILT_IN_CONSTANT_P:
6329 case BUILT_IN_FRAME_ADDRESS:
6330 case BUILT_IN_RETURN_ADDRESS:
6331 return expand_builtin_frame_address (fndecl, exp);
6333 /* Returns the address of the area where the structure is returned.
6335 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6336 if (call_expr_nargs (exp) != 0
6337 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6338 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6341 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6343 case BUILT_IN_ALLOCA:
6344 target = expand_builtin_alloca (exp, target);
6349 case BUILT_IN_STACK_SAVE:
6350 return expand_stack_save ();
6352 case BUILT_IN_STACK_RESTORE:
6353 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6356 case BUILT_IN_BSWAP32:
6357 case BUILT_IN_BSWAP64:
6358 target = expand_builtin_bswap (exp, target, subtarget);
6364 CASE_INT_FN (BUILT_IN_FFS):
6365 case BUILT_IN_FFSIMAX:
6366 target = expand_builtin_unop (target_mode, exp, target,
6367 subtarget, ffs_optab);
6372 CASE_INT_FN (BUILT_IN_CLZ):
6373 case BUILT_IN_CLZIMAX:
6374 target = expand_builtin_unop (target_mode, exp, target,
6375 subtarget, clz_optab);
6380 CASE_INT_FN (BUILT_IN_CTZ):
6381 case BUILT_IN_CTZIMAX:
6382 target = expand_builtin_unop (target_mode, exp, target,
6383 subtarget, ctz_optab);
6388 CASE_INT_FN (BUILT_IN_POPCOUNT):
6389 case BUILT_IN_POPCOUNTIMAX:
6390 target = expand_builtin_unop (target_mode, exp, target,
6391 subtarget, popcount_optab);
6396 CASE_INT_FN (BUILT_IN_PARITY):
6397 case BUILT_IN_PARITYIMAX:
6398 target = expand_builtin_unop (target_mode, exp, target,
6399 subtarget, parity_optab);
6404 case BUILT_IN_STRLEN:
6405 target = expand_builtin_strlen (exp, target, target_mode);
6410 case BUILT_IN_STRCPY:
6411 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6416 case BUILT_IN_STRNCPY:
6417 target = expand_builtin_strncpy (exp, target, mode);
6422 case BUILT_IN_STPCPY:
6423 target = expand_builtin_stpcpy (exp, target, mode);
6428 case BUILT_IN_STRCAT:
6429 target = expand_builtin_strcat (fndecl, exp, target, mode);
6434 case BUILT_IN_STRNCAT:
6435 target = expand_builtin_strncat (exp, target, mode);
6440 case BUILT_IN_STRSPN:
6441 target = expand_builtin_strspn (exp, target, mode);
6446 case BUILT_IN_STRCSPN:
6447 target = expand_builtin_strcspn (exp, target, mode);
6452 case BUILT_IN_STRSTR:
6453 target = expand_builtin_strstr (exp, target, mode);
6458 case BUILT_IN_STRPBRK:
6459 target = expand_builtin_strpbrk (exp, target, mode);
6464 case BUILT_IN_INDEX:
6465 case BUILT_IN_STRCHR:
6466 target = expand_builtin_strchr (exp, target, mode);
6471 case BUILT_IN_RINDEX:
6472 case BUILT_IN_STRRCHR:
6473 target = expand_builtin_strrchr (exp, target, mode);
6478 case BUILT_IN_MEMCPY:
6479 target = expand_builtin_memcpy (exp, target, mode);
6484 case BUILT_IN_MEMPCPY:
6485 target = expand_builtin_mempcpy (exp, target, mode);
6490 case BUILT_IN_MEMMOVE:
6491 target = expand_builtin_memmove (exp, target, mode, ignore);
6496 case BUILT_IN_BCOPY:
6497 target = expand_builtin_bcopy (exp, ignore);
6502 case BUILT_IN_MEMSET:
6503 target = expand_builtin_memset (exp, target, mode);
6508 case BUILT_IN_BZERO:
6509 target = expand_builtin_bzero (exp);
6514 case BUILT_IN_STRCMP:
6515 target = expand_builtin_strcmp (exp, target, mode);
6520 case BUILT_IN_STRNCMP:
6521 target = expand_builtin_strncmp (exp, target, mode);
6526 case BUILT_IN_MEMCHR:
6527 target = expand_builtin_memchr (exp, target, mode);
6533 case BUILT_IN_MEMCMP:
6534 target = expand_builtin_memcmp (exp, target, mode);
6539 case BUILT_IN_SETJMP:
6540 /* This should have been lowered to the builtins below. */
6543 case BUILT_IN_SETJMP_SETUP:
6544 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6545 and the receiver label. */
6546 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6548 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6549 VOIDmode, EXPAND_NORMAL);
6550 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6551 rtx label_r = label_rtx (label);
6553 /* This is copied from the handling of non-local gotos. */
6554 expand_builtin_setjmp_setup (buf_addr, label_r);
6555 nonlocal_goto_handler_labels
6556 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6557 nonlocal_goto_handler_labels);
6558 /* ??? Do not let expand_label treat us as such since we would
6559 not want to be both on the list of non-local labels and on
6560 the list of forced labels. */
6561 FORCED_LABEL (label) = 0;
6566 case BUILT_IN_SETJMP_DISPATCHER:
6567 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6568 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6570 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6571 rtx label_r = label_rtx (label);
6573 /* Remove the dispatcher label from the list of non-local labels
6574 since the receiver labels have been added to it above. */
6575 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6580 case BUILT_IN_SETJMP_RECEIVER:
6581 /* __builtin_setjmp_receiver is passed the receiver label. */
6582 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6584 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6585 rtx label_r = label_rtx (label);
6587 expand_builtin_setjmp_receiver (label_r);
6592 /* __builtin_longjmp is passed a pointer to an array of five words.
6593 It's similar to the C library longjmp function but works with
6594 __builtin_setjmp above. */
6595 case BUILT_IN_LONGJMP:
6596 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6598 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6599 VOIDmode, EXPAND_NORMAL);
6600 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6602 if (value != const1_rtx)
6604 error ("%<__builtin_longjmp%> second argument must be 1");
6608 expand_builtin_longjmp (buf_addr, value);
6613 case BUILT_IN_NONLOCAL_GOTO:
6614 target = expand_builtin_nonlocal_goto (exp);
6619 /* This updates the setjmp buffer that is its argument with the value
6620 of the current stack pointer. */
6621 case BUILT_IN_UPDATE_SETJMP_BUF:
6622 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6625 = expand_normal (CALL_EXPR_ARG (exp, 0));
6627 expand_builtin_update_setjmp_buf (buf_addr);
6633 expand_builtin_trap ();
6636 case BUILT_IN_PRINTF:
6637 target = expand_builtin_printf (exp, target, mode, false);
6642 case BUILT_IN_PRINTF_UNLOCKED:
6643 target = expand_builtin_printf (exp, target, mode, true);
6648 case BUILT_IN_FPUTS:
6649 target = expand_builtin_fputs (exp, target, false);
6653 case BUILT_IN_FPUTS_UNLOCKED:
6654 target = expand_builtin_fputs (exp, target, true);
6659 case BUILT_IN_FPRINTF:
6660 target = expand_builtin_fprintf (exp, target, mode, false);
6665 case BUILT_IN_FPRINTF_UNLOCKED:
6666 target = expand_builtin_fprintf (exp, target, mode, true);
6671 case BUILT_IN_SPRINTF:
6672 target = expand_builtin_sprintf (exp, target, mode);
6677 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6678 case BUILT_IN_SIGNBITD32:
6679 case BUILT_IN_SIGNBITD64:
6680 case BUILT_IN_SIGNBITD128:
6681 target = expand_builtin_signbit (exp, target);
6686 /* Various hooks for the DWARF 2 __throw routine. */
6687 case BUILT_IN_UNWIND_INIT:
6688 expand_builtin_unwind_init ();
6690 case BUILT_IN_DWARF_CFA:
6691 return virtual_cfa_rtx;
6692 #ifdef DWARF2_UNWIND_INFO
6693 case BUILT_IN_DWARF_SP_COLUMN:
6694 return expand_builtin_dwarf_sp_column ();
6695 case BUILT_IN_INIT_DWARF_REG_SIZES:
6696 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6699 case BUILT_IN_FROB_RETURN_ADDR:
6700 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6701 case BUILT_IN_EXTRACT_RETURN_ADDR:
6702 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6703 case BUILT_IN_EH_RETURN:
6704 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6705 CALL_EXPR_ARG (exp, 1));
6707 #ifdef EH_RETURN_DATA_REGNO
6708 case BUILT_IN_EH_RETURN_DATA_REGNO:
6709 return expand_builtin_eh_return_data_regno (exp);
6711 case BUILT_IN_EXTEND_POINTER:
6712 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6714 case BUILT_IN_VA_START:
6715 case BUILT_IN_STDARG_START:
6716 return expand_builtin_va_start (exp);
6717 case BUILT_IN_VA_END:
6718 return expand_builtin_va_end (exp);
6719 case BUILT_IN_VA_COPY:
6720 return expand_builtin_va_copy (exp);
6721 case BUILT_IN_EXPECT:
6722 return expand_builtin_expect (exp, target);
6723 case BUILT_IN_PREFETCH:
6724 expand_builtin_prefetch (exp);
6727 case BUILT_IN_PROFILE_FUNC_ENTER:
6728 return expand_builtin_profile_func (false);
6729 case BUILT_IN_PROFILE_FUNC_EXIT:
6730 return expand_builtin_profile_func (true);
6732 case BUILT_IN_INIT_TRAMPOLINE:
6733 return expand_builtin_init_trampoline (exp);
6734 case BUILT_IN_ADJUST_TRAMPOLINE:
6735 return expand_builtin_adjust_trampoline (exp);
6738 case BUILT_IN_EXECL:
6739 case BUILT_IN_EXECV:
6740 case BUILT_IN_EXECLP:
6741 case BUILT_IN_EXECLE:
6742 case BUILT_IN_EXECVP:
6743 case BUILT_IN_EXECVE:
6744 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6749 case BUILT_IN_FETCH_AND_ADD_1:
6750 case BUILT_IN_FETCH_AND_ADD_2:
6751 case BUILT_IN_FETCH_AND_ADD_4:
6752 case BUILT_IN_FETCH_AND_ADD_8:
6753 case BUILT_IN_FETCH_AND_ADD_16:
6754 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6755 target = expand_builtin_sync_operation (mode, exp, PLUS,
6756 false, target, ignore);
6761 case BUILT_IN_FETCH_AND_SUB_1:
6762 case BUILT_IN_FETCH_AND_SUB_2:
6763 case BUILT_IN_FETCH_AND_SUB_4:
6764 case BUILT_IN_FETCH_AND_SUB_8:
6765 case BUILT_IN_FETCH_AND_SUB_16:
6766 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6767 target = expand_builtin_sync_operation (mode, exp, MINUS,
6768 false, target, ignore);
6773 case BUILT_IN_FETCH_AND_OR_1:
6774 case BUILT_IN_FETCH_AND_OR_2:
6775 case BUILT_IN_FETCH_AND_OR_4:
6776 case BUILT_IN_FETCH_AND_OR_8:
6777 case BUILT_IN_FETCH_AND_OR_16:
6778 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6779 target = expand_builtin_sync_operation (mode, exp, IOR,
6780 false, target, ignore);
6785 case BUILT_IN_FETCH_AND_AND_1:
6786 case BUILT_IN_FETCH_AND_AND_2:
6787 case BUILT_IN_FETCH_AND_AND_4:
6788 case BUILT_IN_FETCH_AND_AND_8:
6789 case BUILT_IN_FETCH_AND_AND_16:
6790 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6791 target = expand_builtin_sync_operation (mode, exp, AND,
6792 false, target, ignore);
6797 case BUILT_IN_FETCH_AND_XOR_1:
6798 case BUILT_IN_FETCH_AND_XOR_2:
6799 case BUILT_IN_FETCH_AND_XOR_4:
6800 case BUILT_IN_FETCH_AND_XOR_8:
6801 case BUILT_IN_FETCH_AND_XOR_16:
6802 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6803 target = expand_builtin_sync_operation (mode, exp, XOR,
6804 false, target, ignore);
6809 case BUILT_IN_FETCH_AND_NAND_1:
6810 case BUILT_IN_FETCH_AND_NAND_2:
6811 case BUILT_IN_FETCH_AND_NAND_4:
6812 case BUILT_IN_FETCH_AND_NAND_8:
6813 case BUILT_IN_FETCH_AND_NAND_16:
6814 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6815 target = expand_builtin_sync_operation (mode, exp, NOT,
6816 false, target, ignore);
6821 case BUILT_IN_ADD_AND_FETCH_1:
6822 case BUILT_IN_ADD_AND_FETCH_2:
6823 case BUILT_IN_ADD_AND_FETCH_4:
6824 case BUILT_IN_ADD_AND_FETCH_8:
6825 case BUILT_IN_ADD_AND_FETCH_16:
6826 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6827 target = expand_builtin_sync_operation (mode, exp, PLUS,
6828 true, target, ignore);
6833 case BUILT_IN_SUB_AND_FETCH_1:
6834 case BUILT_IN_SUB_AND_FETCH_2:
6835 case BUILT_IN_SUB_AND_FETCH_4:
6836 case BUILT_IN_SUB_AND_FETCH_8:
6837 case BUILT_IN_SUB_AND_FETCH_16:
6838 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6839 target = expand_builtin_sync_operation (mode, exp, MINUS,
6840 true, target, ignore);
6845 case BUILT_IN_OR_AND_FETCH_1:
6846 case BUILT_IN_OR_AND_FETCH_2:
6847 case BUILT_IN_OR_AND_FETCH_4:
6848 case BUILT_IN_OR_AND_FETCH_8:
6849 case BUILT_IN_OR_AND_FETCH_16:
6850 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6851 target = expand_builtin_sync_operation (mode, exp, IOR,
6852 true, target, ignore);
6857 case BUILT_IN_AND_AND_FETCH_1:
6858 case BUILT_IN_AND_AND_FETCH_2:
6859 case BUILT_IN_AND_AND_FETCH_4:
6860 case BUILT_IN_AND_AND_FETCH_8:
6861 case BUILT_IN_AND_AND_FETCH_16:
6862 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6863 target = expand_builtin_sync_operation (mode, exp, AND,
6864 true, target, ignore);
6869 case BUILT_IN_XOR_AND_FETCH_1:
6870 case BUILT_IN_XOR_AND_FETCH_2:
6871 case BUILT_IN_XOR_AND_FETCH_4:
6872 case BUILT_IN_XOR_AND_FETCH_8:
6873 case BUILT_IN_XOR_AND_FETCH_16:
6874 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6875 target = expand_builtin_sync_operation (mode, exp, XOR,
6876 true, target, ignore);
6881 case BUILT_IN_NAND_AND_FETCH_1:
6882 case BUILT_IN_NAND_AND_FETCH_2:
6883 case BUILT_IN_NAND_AND_FETCH_4:
6884 case BUILT_IN_NAND_AND_FETCH_8:
6885 case BUILT_IN_NAND_AND_FETCH_16:
6886 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6887 target = expand_builtin_sync_operation (mode, exp, NOT,
6888 true, target, ignore);
6893 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6894 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6895 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6896 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6897 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6898 if (mode == VOIDmode)
6899 mode = TYPE_MODE (boolean_type_node);
6900 if (!target || !register_operand (target, mode))
6901 target = gen_reg_rtx (mode);
6903 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6904 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6909 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6910 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6911 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6912 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6913 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6914 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6915 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6920 case BUILT_IN_LOCK_TEST_AND_SET_1:
6921 case BUILT_IN_LOCK_TEST_AND_SET_2:
6922 case BUILT_IN_LOCK_TEST_AND_SET_4:
6923 case BUILT_IN_LOCK_TEST_AND_SET_8:
6924 case BUILT_IN_LOCK_TEST_AND_SET_16:
6925 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6926 target = expand_builtin_lock_test_and_set (mode, exp, target);
6931 case BUILT_IN_LOCK_RELEASE_1:
6932 case BUILT_IN_LOCK_RELEASE_2:
6933 case BUILT_IN_LOCK_RELEASE_4:
6934 case BUILT_IN_LOCK_RELEASE_8:
6935 case BUILT_IN_LOCK_RELEASE_16:
6936 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6937 expand_builtin_lock_release (mode, exp);
6940 case BUILT_IN_SYNCHRONIZE:
6941 expand_builtin_synchronize ();
6944 case BUILT_IN_OBJECT_SIZE:
6945 return expand_builtin_object_size (exp);
6947 case BUILT_IN_MEMCPY_CHK:
6948 case BUILT_IN_MEMPCPY_CHK:
6949 case BUILT_IN_MEMMOVE_CHK:
6950 case BUILT_IN_MEMSET_CHK:
6951 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6956 case BUILT_IN_STRCPY_CHK:
6957 case BUILT_IN_STPCPY_CHK:
6958 case BUILT_IN_STRNCPY_CHK:
6959 case BUILT_IN_STRCAT_CHK:
6960 case BUILT_IN_STRNCAT_CHK:
6961 case BUILT_IN_SNPRINTF_CHK:
6962 case BUILT_IN_VSNPRINTF_CHK:
6963 maybe_emit_chk_warning (exp, fcode);
6966 case BUILT_IN_SPRINTF_CHK:
6967 case BUILT_IN_VSPRINTF_CHK:
6968 maybe_emit_sprintf_chk_warning (exp, fcode);
6971 default: /* just do library call, if unknown builtin */
6975 /* The switch statement above can drop through to cause the function
6976 to be called normally. */
6977 return expand_call (exp, target, ignore);
6980 /* Determine whether a tree node represents a call to a built-in
6981 function. If the tree T is a call to a built-in function with
6982 the right number of arguments of the appropriate types, return
6983 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6984 Otherwise the return value is END_BUILTINS. */
6986 enum built_in_function
6987 builtin_mathfn_code (const_tree t)
6989 const_tree fndecl, arg, parmlist;
6990 const_tree argtype, parmtype;
6991 const_call_expr_arg_iterator iter;
6993 if (TREE_CODE (t) != CALL_EXPR
6994 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6995 return END_BUILTINS;
6997 fndecl = get_callee_fndecl (t);
6998 if (fndecl == NULL_TREE
6999 || TREE_CODE (fndecl) != FUNCTION_DECL
7000 || ! DECL_BUILT_IN (fndecl)
7001 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7002 return END_BUILTINS;
7004 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7005 init_const_call_expr_arg_iterator (t, &iter);
7006 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7008 /* If a function doesn't take a variable number of arguments,
7009 the last element in the list will have type `void'. */
7010 parmtype = TREE_VALUE (parmlist);
7011 if (VOID_TYPE_P (parmtype))
7013 if (more_const_call_expr_args_p (&iter))
7014 return END_BUILTINS;
7015 return DECL_FUNCTION_CODE (fndecl);
7018 if (! more_const_call_expr_args_p (&iter))
7019 return END_BUILTINS;
7021 arg = next_const_call_expr_arg (&iter);
7022 argtype = TREE_TYPE (arg);
7024 if (SCALAR_FLOAT_TYPE_P (parmtype))
7026 if (! SCALAR_FLOAT_TYPE_P (argtype))
7027 return END_BUILTINS;
7029 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7031 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7032 return END_BUILTINS;
7034 else if (POINTER_TYPE_P (parmtype))
7036 if (! POINTER_TYPE_P (argtype))
7037 return END_BUILTINS;
7039 else if (INTEGRAL_TYPE_P (parmtype))
7041 if (! INTEGRAL_TYPE_P (argtype))
7042 return END_BUILTINS;
7045 return END_BUILTINS;
7048 /* Variable-length argument list. */
7049 return DECL_FUNCTION_CODE (fndecl);
7052 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7053 evaluate to a constant. */
7056 fold_builtin_constant_p (tree arg)
7058 /* We return 1 for a numeric type that's known to be a constant
7059 value at compile-time or for an aggregate type that's a
7060 literal constant. */
7063 /* If we know this is a constant, emit the constant of one. */
7064 if (CONSTANT_CLASS_P (arg)
7065 || (TREE_CODE (arg) == CONSTRUCTOR
7066 && TREE_CONSTANT (arg)))
7067 return integer_one_node;
7068 if (TREE_CODE (arg) == ADDR_EXPR)
7070 tree op = TREE_OPERAND (arg, 0);
7071 if (TREE_CODE (op) == STRING_CST
7072 || (TREE_CODE (op) == ARRAY_REF
7073 && integer_zerop (TREE_OPERAND (op, 1))
7074 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7075 return integer_one_node;
7078 /* If this expression has side effects, show we don't know it to be a
7079 constant. Likewise if it's a pointer or aggregate type since in
7080 those case we only want literals, since those are only optimized
7081 when generating RTL, not later.
7082 And finally, if we are compiling an initializer, not code, we
7083 need to return a definite result now; there's not going to be any
7084 more optimization done. */
7085 if (TREE_SIDE_EFFECTS (arg)
7086 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7087 || POINTER_TYPE_P (TREE_TYPE (arg))
7089 || folding_initializer)
7090 return integer_zero_node;
7095 /* Fold a call to __builtin_expect with argument ARG, if we expect that a
7096 comparison against the argument will fold to a constant. In practice,
7097 this means a true constant or the address of a non-weak symbol. */
7100 fold_builtin_expect (tree arg)
7104 /* If the argument isn't invariant, then there's nothing we can do. */
7105 if (!TREE_INVARIANT (arg))
7108 /* If we're looking at an address of a weak decl, then do not fold. */
7111 if (TREE_CODE (inner) == ADDR_EXPR)
7115 inner = TREE_OPERAND (inner, 0);
7117 while (TREE_CODE (inner) == COMPONENT_REF
7118 || TREE_CODE (inner) == ARRAY_REF);
7119 if (DECL_P (inner) && DECL_WEAK (inner))
7123 /* Otherwise, ARG already has the proper type for the return value. */
7127 /* Fold a call to __builtin_classify_type with argument ARG. */
7130 fold_builtin_classify_type (tree arg)
7133 return build_int_cst (NULL_TREE, no_type_class);
7135 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7138 /* Fold a call to __builtin_strlen with argument ARG. */
7141 fold_builtin_strlen (tree arg)
7143 if (!validate_arg (arg, POINTER_TYPE))
7147 tree len = c_strlen (arg, 0);
7151 /* Convert from the internal "sizetype" type to "size_t". */
7153 len = fold_convert (size_type_node, len);
7161 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7164 fold_builtin_inf (tree type, int warn)
7166 REAL_VALUE_TYPE real;
7168 /* __builtin_inff is intended to be usable to define INFINITY on all
7169 targets. If an infinity is not available, INFINITY expands "to a
7170 positive constant of type float that overflows at translation
7171 time", footnote "In this case, using INFINITY will violate the
7172 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7173 Thus we pedwarn to ensure this constraint violation is
7175 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7176 pedwarn ("target format does not support infinity");
7179 return build_real (type, real);
7182 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7185 fold_builtin_nan (tree arg, tree type, int quiet)
7187 REAL_VALUE_TYPE real;
7190 if (!validate_arg (arg, POINTER_TYPE))
7192 str = c_getstr (arg);
7196 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7199 return build_real (type, real);
7202 /* Return true if the floating point expression T has an integer value.
7203 We also allow +Inf, -Inf and NaN to be considered integer values. */
7206 integer_valued_real_p (tree t)
7208 switch (TREE_CODE (t))
7215 case NON_LVALUE_EXPR:
7216 return integer_valued_real_p (TREE_OPERAND (t, 0));
7221 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7228 return integer_valued_real_p (TREE_OPERAND (t, 0))
7229 && integer_valued_real_p (TREE_OPERAND (t, 1));
7232 return integer_valued_real_p (TREE_OPERAND (t, 1))
7233 && integer_valued_real_p (TREE_OPERAND (t, 2));
7236 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7240 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7241 if (TREE_CODE (type) == INTEGER_TYPE)
7243 if (TREE_CODE (type) == REAL_TYPE)
7244 return integer_valued_real_p (TREE_OPERAND (t, 0));
7249 switch (builtin_mathfn_code (t))
7251 CASE_FLT_FN (BUILT_IN_CEIL):
7252 CASE_FLT_FN (BUILT_IN_FLOOR):
7253 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7254 CASE_FLT_FN (BUILT_IN_RINT):
7255 CASE_FLT_FN (BUILT_IN_ROUND):
7256 CASE_FLT_FN (BUILT_IN_TRUNC):
7259 CASE_FLT_FN (BUILT_IN_FMIN):
7260 CASE_FLT_FN (BUILT_IN_FMAX):
7261 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7262 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7275 /* FNDECL is assumed to be a builtin where truncation can be propagated
7276 across (for instance floor((double)f) == (double)floorf (f).
7277 Do the transformation for a call with argument ARG. */
7280 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7282 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7284 if (!validate_arg (arg, REAL_TYPE))
7287 /* Integer rounding functions are idempotent. */
7288 if (fcode == builtin_mathfn_code (arg))
7291 /* If argument is already integer valued, and we don't need to worry
7292 about setting errno, there's no need to perform rounding. */
7293 if (! flag_errno_math && integer_valued_real_p (arg))
7298 tree arg0 = strip_float_extensions (arg);
7299 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7300 tree newtype = TREE_TYPE (arg0);
7303 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7304 && (decl = mathfn_built_in (newtype, fcode)))
7305 return fold_convert (ftype,
7306 build_call_expr (decl, 1,
7307 fold_convert (newtype, arg0)));
7312 /* FNDECL is assumed to be builtin which can narrow the FP type of
7313 the argument, for instance lround((double)f) -> lroundf (f).
7314 Do the transformation for a call with argument ARG. */
7317 fold_fixed_mathfn (tree fndecl, tree arg)
7319 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7321 if (!validate_arg (arg, REAL_TYPE))
7324 /* If argument is already integer valued, and we don't need to worry
7325 about setting errno, there's no need to perform rounding. */
7326 if (! flag_errno_math && integer_valued_real_p (arg))
7327 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7331 tree ftype = TREE_TYPE (arg);
7332 tree arg0 = strip_float_extensions (arg);
7333 tree newtype = TREE_TYPE (arg0);
7336 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7337 && (decl = mathfn_built_in (newtype, fcode)))
7338 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7341 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7342 sizeof (long long) == sizeof (long). */
7343 if (TYPE_PRECISION (long_long_integer_type_node)
7344 == TYPE_PRECISION (long_integer_type_node))
7346 tree newfn = NULL_TREE;
7349 CASE_FLT_FN (BUILT_IN_LLCEIL):
7350 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7353 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7354 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7357 CASE_FLT_FN (BUILT_IN_LLROUND):
7358 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7361 CASE_FLT_FN (BUILT_IN_LLRINT):
7362 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7371 tree newcall = build_call_expr(newfn, 1, arg);
7372 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7379 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7380 return type. Return NULL_TREE if no simplification can be made. */
7383 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7387 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7388 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7391 /* Calculate the result when the argument is a constant. */
7392 if (TREE_CODE (arg) == COMPLEX_CST
7393 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7397 if (TREE_CODE (arg) == COMPLEX_EXPR)
7399 tree real = TREE_OPERAND (arg, 0);
7400 tree imag = TREE_OPERAND (arg, 1);
7402 /* If either part is zero, cabs is fabs of the other. */
7403 if (real_zerop (real))
7404 return fold_build1 (ABS_EXPR, type, imag);
7405 if (real_zerop (imag))
7406 return fold_build1 (ABS_EXPR, type, real);
7408 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7409 if (flag_unsafe_math_optimizations
7410 && operand_equal_p (real, imag, OEP_PURE_SAME))
7412 const REAL_VALUE_TYPE sqrt2_trunc
7413 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
7415 return fold_build2 (MULT_EXPR, type,
7416 fold_build1 (ABS_EXPR, type, real),
7417 build_real (type, sqrt2_trunc));
7421 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7422 if (TREE_CODE (arg) == NEGATE_EXPR
7423 || TREE_CODE (arg) == CONJ_EXPR)
7424 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7426 /* Don't do this when optimizing for size. */
7427 if (flag_unsafe_math_optimizations
7428 && optimize && !optimize_size)
7430 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7432 if (sqrtfn != NULL_TREE)
7434 tree rpart, ipart, result;
7436 arg = builtin_save_expr (arg);
7438 rpart = fold_build1 (REALPART_EXPR, type, arg);
7439 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7441 rpart = builtin_save_expr (rpart);
7442 ipart = builtin_save_expr (ipart);
7444 result = fold_build2 (PLUS_EXPR, type,
7445 fold_build2 (MULT_EXPR, type,
7447 fold_build2 (MULT_EXPR, type,
7450 return build_call_expr (sqrtfn, 1, result);
7457 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7458 Return NULL_TREE if no simplification can be made. */
7461 fold_builtin_sqrt (tree arg, tree type)
7464 enum built_in_function fcode;
7467 if (!validate_arg (arg, REAL_TYPE))
7470 /* Calculate the result when the argument is a constant. */
7471 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7474 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7475 fcode = builtin_mathfn_code (arg);
7476 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7478 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7479 arg = fold_build2 (MULT_EXPR, type,
7480 CALL_EXPR_ARG (arg, 0),
7481 build_real (type, dconsthalf));
7482 return build_call_expr (expfn, 1, arg);
7485 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7486 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7488 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7492 tree arg0 = CALL_EXPR_ARG (arg, 0);
7494 /* The inner root was either sqrt or cbrt. */
7495 REAL_VALUE_TYPE dconstroot =
7496 BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;
7498 /* Adjust for the outer root. */
7499 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7500 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7501 tree_root = build_real (type, dconstroot);
7502 return build_call_expr (powfn, 2, arg0, tree_root);
7506 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7507 if (flag_unsafe_math_optimizations
7508 && (fcode == BUILT_IN_POW
7509 || fcode == BUILT_IN_POWF
7510 || fcode == BUILT_IN_POWL))
7512 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7513 tree arg0 = CALL_EXPR_ARG (arg, 0);
7514 tree arg1 = CALL_EXPR_ARG (arg, 1);
7516 if (!tree_expr_nonnegative_p (arg0))
7517 arg0 = build1 (ABS_EXPR, type, arg0);
7518 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7519 build_real (type, dconsthalf));
7520 return build_call_expr (powfn, 2, arg0, narg1);
7526 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7527 Return NULL_TREE if no simplification can be made. */
7530 fold_builtin_cbrt (tree arg, tree type)
7532 const enum built_in_function fcode = builtin_mathfn_code (arg);
7535 if (!validate_arg (arg, REAL_TYPE))
7538 /* Calculate the result when the argument is a constant. */
7539 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7542 if (flag_unsafe_math_optimizations)
7544 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7545 if (BUILTIN_EXPONENT_P (fcode))
7547 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7548 const REAL_VALUE_TYPE third_trunc =
7549 real_value_truncate (TYPE_MODE (type), dconstthird);
7550 arg = fold_build2 (MULT_EXPR, type,
7551 CALL_EXPR_ARG (arg, 0),
7552 build_real (type, third_trunc));
7553 return build_call_expr (expfn, 1, arg);
7556 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7557 if (BUILTIN_SQRT_P (fcode))
7559 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7563 tree arg0 = CALL_EXPR_ARG (arg, 0);
7565 REAL_VALUE_TYPE dconstroot = dconstthird;
7567 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7568 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7569 tree_root = build_real (type, dconstroot);
7570 return build_call_expr (powfn, 2, arg0, tree_root);
7574 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7575 if (BUILTIN_CBRT_P (fcode))
7577 tree arg0 = CALL_EXPR_ARG (arg, 0);
7578 if (tree_expr_nonnegative_p (arg0))
7580 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7585 REAL_VALUE_TYPE dconstroot;
7587 real_arithmetic (&dconstroot, MULT_EXPR, &dconstthird, &dconstthird);
7588 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7589 tree_root = build_real (type, dconstroot);
7590 return build_call_expr (powfn, 2, arg0, tree_root);
7595 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7596 if (fcode == BUILT_IN_POW
7597 || fcode == BUILT_IN_POWF
7598 || fcode == BUILT_IN_POWL)
7600 tree arg00 = CALL_EXPR_ARG (arg, 0);
7601 tree arg01 = CALL_EXPR_ARG (arg, 1);
7602 if (tree_expr_nonnegative_p (arg00))
7604 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7605 const REAL_VALUE_TYPE dconstroot
7606 = real_value_truncate (TYPE_MODE (type), dconstthird);
7607 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7608 build_real (type, dconstroot));
7609 return build_call_expr (powfn, 2, arg00, narg01);
7616 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7617 TYPE is the type of the return value. Return NULL_TREE if no
7618 simplification can be made. */
7621 fold_builtin_cos (tree arg, tree type, tree fndecl)
7625 if (!validate_arg (arg, REAL_TYPE))
7628 /* Calculate the result when the argument is a constant. */
7629 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7632 /* Optimize cos(-x) into cos (x). */
7633 if ((narg = fold_strip_sign_ops (arg)))
7634 return build_call_expr (fndecl, 1, narg);
7639 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7640 Return NULL_TREE if no simplification can be made. */
7643 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7645 if (validate_arg (arg, REAL_TYPE))
7649 /* Calculate the result when the argument is a constant. */
7650 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7653 /* Optimize cosh(-x) into cosh (x). */
7654 if ((narg = fold_strip_sign_ops (arg)))
7655 return build_call_expr (fndecl, 1, narg);
7661 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7662 Return NULL_TREE if no simplification can be made. */
7665 fold_builtin_tan (tree arg, tree type)
7667 enum built_in_function fcode;
7670 if (!validate_arg (arg, REAL_TYPE))
7673 /* Calculate the result when the argument is a constant. */
7674 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7677 /* Optimize tan(atan(x)) = x. */
7678 fcode = builtin_mathfn_code (arg);
7679 if (flag_unsafe_math_optimizations
7680 && (fcode == BUILT_IN_ATAN
7681 || fcode == BUILT_IN_ATANF
7682 || fcode == BUILT_IN_ATANL))
7683 return CALL_EXPR_ARG (arg, 0);
7688 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7689 NULL_TREE if no simplification can be made. */
7692 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7697 if (!validate_arg (arg0, REAL_TYPE)
7698 || !validate_arg (arg1, POINTER_TYPE)
7699 || !validate_arg (arg2, POINTER_TYPE))
7702 type = TREE_TYPE (arg0);
7704 /* Calculate the result when the argument is a constant. */
7705 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7708 /* Canonicalize sincos to cexpi. */
7709 if (!TARGET_C99_FUNCTIONS)
7711 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7715 call = build_call_expr (fn, 1, arg0);
7716 call = builtin_save_expr (call);
7718 return build2 (COMPOUND_EXPR, type,
7719 build2 (MODIFY_EXPR, void_type_node,
7720 build_fold_indirect_ref (arg1),
7721 build1 (IMAGPART_EXPR, type, call)),
7722 build2 (MODIFY_EXPR, void_type_node,
7723 build_fold_indirect_ref (arg2),
7724 build1 (REALPART_EXPR, type, call)));
7727 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7728 NULL_TREE if no simplification can be made. */
7731 fold_builtin_cexp (tree arg0, tree type)
7734 tree realp, imagp, ifn;
7736 if (!validate_arg (arg0, COMPLEX_TYPE))
7739 rtype = TREE_TYPE (TREE_TYPE (arg0));
7741 /* In case we can figure out the real part of arg0 and it is constant zero
7743 if (!TARGET_C99_FUNCTIONS)
7745 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7749 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7750 && real_zerop (realp))
7752 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7753 return build_call_expr (ifn, 1, narg);
7756 /* In case we can easily decompose real and imaginary parts split cexp
7757 to exp (r) * cexpi (i). */
7758 if (flag_unsafe_math_optimizations
7761 tree rfn, rcall, icall;
7763 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7767 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7771 icall = build_call_expr (ifn, 1, imagp);
7772 icall = builtin_save_expr (icall);
7773 rcall = build_call_expr (rfn, 1, realp);
7774 rcall = builtin_save_expr (rcall);
7775 return build2 (COMPLEX_EXPR, type,
7776 build2 (MULT_EXPR, rtype,
7778 build1 (REALPART_EXPR, rtype, icall)),
7779 build2 (MULT_EXPR, rtype,
7781 build1 (IMAGPART_EXPR, rtype, icall)));
7787 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7788 Return NULL_TREE if no simplification can be made. */
7791 fold_builtin_trunc (tree fndecl, tree arg)
7793 if (!validate_arg (arg, REAL_TYPE))
7796 /* Optimize trunc of constant value. */
7797 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7799 REAL_VALUE_TYPE r, x;
7800 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7802 x = TREE_REAL_CST (arg);
7803 real_trunc (&r, TYPE_MODE (type), &x);
7804 return build_real (type, r);
7807 return fold_trunc_transparent_mathfn (fndecl, arg);
7810 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7811 Return NULL_TREE if no simplification can be made. */
7814 fold_builtin_floor (tree fndecl, tree arg)
7816 if (!validate_arg (arg, REAL_TYPE))
7819 /* Optimize floor of constant value. */
7820 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7824 x = TREE_REAL_CST (arg);
7825 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7827 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7830 real_floor (&r, TYPE_MODE (type), &x);
7831 return build_real (type, r);
7835 /* Fold floor (x) where x is nonnegative to trunc (x). */
7836 if (tree_expr_nonnegative_p (arg))
7838 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7840 return build_call_expr (truncfn, 1, arg);
7843 return fold_trunc_transparent_mathfn (fndecl, arg);
7846 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7847 Return NULL_TREE if no simplification can be made. */
7850 fold_builtin_ceil (tree fndecl, tree arg)
7852 if (!validate_arg (arg, REAL_TYPE))
7855 /* Optimize ceil of constant value. */
7856 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7860 x = TREE_REAL_CST (arg);
7861 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7863 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7866 real_ceil (&r, TYPE_MODE (type), &x);
7867 return build_real (type, r);
7871 return fold_trunc_transparent_mathfn (fndecl, arg);
7874 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7875 Return NULL_TREE if no simplification can be made. */
7878 fold_builtin_round (tree fndecl, tree arg)
7880 if (!validate_arg (arg, REAL_TYPE))
7883 /* Optimize round of constant value. */
7884 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7888 x = TREE_REAL_CST (arg);
7889 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7891 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7894 real_round (&r, TYPE_MODE (type), &x);
7895 return build_real (type, r);
7899 return fold_trunc_transparent_mathfn (fndecl, arg);
7902 /* Fold function call to builtin lround, lroundf or lroundl (or the
7903 corresponding long long versions) and other rounding functions. ARG
7904 is the argument to the call. Return NULL_TREE if no simplification
7908 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7910 if (!validate_arg (arg, REAL_TYPE))
7913 /* Optimize lround of constant value. */
7914 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7916 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7918 if (real_isfinite (&x))
7920 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7921 tree ftype = TREE_TYPE (arg);
7922 unsigned HOST_WIDE_INT lo2;
7923 HOST_WIDE_INT hi, lo;
7926 switch (DECL_FUNCTION_CODE (fndecl))
7928 CASE_FLT_FN (BUILT_IN_LFLOOR):
7929 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7930 real_floor (&r, TYPE_MODE (ftype), &x);
7933 CASE_FLT_FN (BUILT_IN_LCEIL):
7934 CASE_FLT_FN (BUILT_IN_LLCEIL):
7935 real_ceil (&r, TYPE_MODE (ftype), &x);
7938 CASE_FLT_FN (BUILT_IN_LROUND):
7939 CASE_FLT_FN (BUILT_IN_LLROUND):
7940 real_round (&r, TYPE_MODE (ftype), &x);
7947 REAL_VALUE_TO_INT (&lo, &hi, r);
7948 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7949 return build_int_cst_wide (itype, lo2, hi);
7953 switch (DECL_FUNCTION_CODE (fndecl))
7955 CASE_FLT_FN (BUILT_IN_LFLOOR):
7956 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7957 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7958 if (tree_expr_nonnegative_p (arg))
7959 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
7965 return fold_fixed_mathfn (fndecl, arg);
7968 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7969 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7970 the argument to the call. Return NULL_TREE if no simplification can
7974 fold_builtin_bitop (tree fndecl, tree arg)
7976 if (!validate_arg (arg, INTEGER_TYPE))
7979 /* Optimize for constant argument. */
7980 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7982 HOST_WIDE_INT hi, width, result;
7983 unsigned HOST_WIDE_INT lo;
7986 type = TREE_TYPE (arg);
7987 width = TYPE_PRECISION (type);
7988 lo = TREE_INT_CST_LOW (arg);
7990 /* Clear all the bits that are beyond the type's precision. */
7991 if (width > HOST_BITS_PER_WIDE_INT)
7993 hi = TREE_INT_CST_HIGH (arg);
7994 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7995 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8000 if (width < HOST_BITS_PER_WIDE_INT)
8001 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8004 switch (DECL_FUNCTION_CODE (fndecl))
8006 CASE_INT_FN (BUILT_IN_FFS):
8008 result = exact_log2 (lo & -lo) + 1;
8010 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8015 CASE_INT_FN (BUILT_IN_CLZ):
8017 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8019 result = width - floor_log2 (lo) - 1;
8020 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8024 CASE_INT_FN (BUILT_IN_CTZ):
8026 result = exact_log2 (lo & -lo);
8028 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8029 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8033 CASE_INT_FN (BUILT_IN_POPCOUNT):
8036 result++, lo &= lo - 1;
8038 result++, hi &= hi - 1;
8041 CASE_INT_FN (BUILT_IN_PARITY):
8044 result++, lo &= lo - 1;
8046 result++, hi &= hi - 1;
8054 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8060 /* Fold function call to builtin_bswap and the long and long long
8061 variants. Return NULL_TREE if no simplification can be made. */
8063 fold_builtin_bswap (tree fndecl, tree arg)
8065 if (! validate_arg (arg, INTEGER_TYPE))
8068 /* Optimize constant value. */
8069 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8071 HOST_WIDE_INT hi, width, r_hi = 0;
8072 unsigned HOST_WIDE_INT lo, r_lo = 0;
8075 type = TREE_TYPE (arg);
8076 width = TYPE_PRECISION (type);
8077 lo = TREE_INT_CST_LOW (arg);
8078 hi = TREE_INT_CST_HIGH (arg);
8080 switch (DECL_FUNCTION_CODE (fndecl))
8082 case BUILT_IN_BSWAP32:
8083 case BUILT_IN_BSWAP64:
8087 for (s = 0; s < width; s += 8)
8089 int d = width - s - 8;
8090 unsigned HOST_WIDE_INT byte;
8092 if (s < HOST_BITS_PER_WIDE_INT)
8093 byte = (lo >> s) & 0xff;
8095 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8097 if (d < HOST_BITS_PER_WIDE_INT)
8100 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8110 if (width < HOST_BITS_PER_WIDE_INT)
8111 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8113 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8119 /* Return true if EXPR is the real constant contained in VALUE. */
8122 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8126 return ((TREE_CODE (expr) == REAL_CST
8127 && !TREE_OVERFLOW (expr)
8128 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8129 || (TREE_CODE (expr) == COMPLEX_CST
8130 && real_dconstp (TREE_REALPART (expr), value)
8131 && real_zerop (TREE_IMAGPART (expr))));
8134 /* A subroutine of fold_builtin to fold the various logarithmic
8135 functions. Return NULL_TREE if no simplification can me made.
8136 FUNC is the corresponding MPFR logarithm function. */
8139 fold_builtin_logarithm (tree fndecl, tree arg,
8140 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8142 if (validate_arg (arg, REAL_TYPE))
8144 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8146 const enum built_in_function fcode = builtin_mathfn_code (arg);
8148 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8149 instead we'll look for 'e' truncated to MODE. So only do
8150 this if flag_unsafe_math_optimizations is set. */
8151 if (flag_unsafe_math_optimizations && func == mpfr_log)
8153 const REAL_VALUE_TYPE e_truncated =
8154 real_value_truncate (TYPE_MODE (type), dconste);
8155 if (real_dconstp (arg, &e_truncated))
8156 return build_real (type, dconst1);
8159 /* Calculate the result when the argument is a constant. */
8160 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8163 /* Special case, optimize logN(expN(x)) = x. */
8164 if (flag_unsafe_math_optimizations
8165 && ((func == mpfr_log
8166 && (fcode == BUILT_IN_EXP
8167 || fcode == BUILT_IN_EXPF
8168 || fcode == BUILT_IN_EXPL))
8169 || (func == mpfr_log2
8170 && (fcode == BUILT_IN_EXP2
8171 || fcode == BUILT_IN_EXP2F
8172 || fcode == BUILT_IN_EXP2L))
8173 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8174 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8176 /* Optimize logN(func()) for various exponential functions. We
8177 want to determine the value "x" and the power "exponent" in
8178 order to transform logN(x**exponent) into exponent*logN(x). */
8179 if (flag_unsafe_math_optimizations)
8181 tree exponent = 0, x = 0;
8185 CASE_FLT_FN (BUILT_IN_EXP):
8186 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8187 x = build_real (type,
8188 real_value_truncate (TYPE_MODE (type), dconste));
8189 exponent = CALL_EXPR_ARG (arg, 0);
8191 CASE_FLT_FN (BUILT_IN_EXP2):
8192 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8193 x = build_real (type, dconst2);
8194 exponent = CALL_EXPR_ARG (arg, 0);
8196 CASE_FLT_FN (BUILT_IN_EXP10):
8197 CASE_FLT_FN (BUILT_IN_POW10):
8198 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8199 x = build_real (type, dconst10);
8200 exponent = CALL_EXPR_ARG (arg, 0);
8202 CASE_FLT_FN (BUILT_IN_SQRT):
8203 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8204 x = CALL_EXPR_ARG (arg, 0);
8205 exponent = build_real (type, dconsthalf);
8207 CASE_FLT_FN (BUILT_IN_CBRT):
8208 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8209 x = CALL_EXPR_ARG (arg, 0);
8210 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8213 CASE_FLT_FN (BUILT_IN_POW):
8214 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8215 x = CALL_EXPR_ARG (arg, 0);
8216 exponent = CALL_EXPR_ARG (arg, 1);
8222 /* Now perform the optimization. */
8225 tree logfn = build_call_expr (fndecl, 1, x);
8226 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8234 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8235 NULL_TREE if no simplification can be made. */
8238 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8240 tree res, narg0, narg1;
8242 if (!validate_arg (arg0, REAL_TYPE)
8243 || !validate_arg (arg1, REAL_TYPE))
8246 /* Calculate the result when the argument is a constant. */
8247 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8250 /* If either argument to hypot has a negate or abs, strip that off.
8251 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8252 narg0 = fold_strip_sign_ops (arg0);
8253 narg1 = fold_strip_sign_ops (arg1);
8256 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8257 narg1 ? narg1 : arg1);
8260 /* If either argument is zero, hypot is fabs of the other. */
8261 if (real_zerop (arg0))
8262 return fold_build1 (ABS_EXPR, type, arg1);
8263 else if (real_zerop (arg1))
8264 return fold_build1 (ABS_EXPR, type, arg0);
8266 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8267 if (flag_unsafe_math_optimizations
8268 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8270 const REAL_VALUE_TYPE sqrt2_trunc
8271 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
8272 return fold_build2 (MULT_EXPR, type,
8273 fold_build1 (ABS_EXPR, type, arg0),
8274 build_real (type, sqrt2_trunc));
8281 /* Fold a builtin function call to pow, powf, or powl. Return
8282 NULL_TREE if no simplification can be made. */
8284 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8288 if (!validate_arg (arg0, REAL_TYPE)
8289 || !validate_arg (arg1, REAL_TYPE))
8292 /* Calculate the result when the argument is a constant. */
8293 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8296 /* Optimize pow(1.0,y) = 1.0. */
8297 if (real_onep (arg0))
8298 return omit_one_operand (type, build_real (type, dconst1), arg1);
8300 if (TREE_CODE (arg1) == REAL_CST
8301 && !TREE_OVERFLOW (arg1))
8303 REAL_VALUE_TYPE cint;
8307 c = TREE_REAL_CST (arg1);
8309 /* Optimize pow(x,0.0) = 1.0. */
8310 if (REAL_VALUES_EQUAL (c, dconst0))
8311 return omit_one_operand (type, build_real (type, dconst1),
8314 /* Optimize pow(x,1.0) = x. */
8315 if (REAL_VALUES_EQUAL (c, dconst1))
8318 /* Optimize pow(x,-1.0) = 1.0/x. */
8319 if (REAL_VALUES_EQUAL (c, dconstm1))
8320 return fold_build2 (RDIV_EXPR, type,
8321 build_real (type, dconst1), arg0);
8323 /* Optimize pow(x,0.5) = sqrt(x). */
8324 if (flag_unsafe_math_optimizations
8325 && REAL_VALUES_EQUAL (c, dconsthalf))
8327 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8329 if (sqrtfn != NULL_TREE)
8330 return build_call_expr (sqrtfn, 1, arg0);
8333 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8334 if (flag_unsafe_math_optimizations)
8336 const REAL_VALUE_TYPE dconstroot
8337 = real_value_truncate (TYPE_MODE (type), dconstthird);
8339 if (REAL_VALUES_EQUAL (c, dconstroot))
8341 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8342 if (cbrtfn != NULL_TREE)
8343 return build_call_expr (cbrtfn, 1, arg0);
8347 /* Check for an integer exponent. */
8348 n = real_to_integer (&c);
8349 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8350 if (real_identical (&c, &cint))
8352 /* Attempt to evaluate pow at compile-time. */
8353 if (TREE_CODE (arg0) == REAL_CST
8354 && !TREE_OVERFLOW (arg0))
8359 x = TREE_REAL_CST (arg0);
8360 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8361 if (flag_unsafe_math_optimizations || !inexact)
8362 return build_real (type, x);
8365 /* Strip sign ops from even integer powers. */
8366 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8368 tree narg0 = fold_strip_sign_ops (arg0);
8370 return build_call_expr (fndecl, 2, narg0, arg1);
8375 if (flag_unsafe_math_optimizations)
8377 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8379 /* Optimize pow(expN(x),y) = expN(x*y). */
8380 if (BUILTIN_EXPONENT_P (fcode))
8382 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8383 tree arg = CALL_EXPR_ARG (arg0, 0);
8384 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8385 return build_call_expr (expfn, 1, arg);
8388 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8389 if (BUILTIN_SQRT_P (fcode))
8391 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8392 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8393 build_real (type, dconsthalf));
8394 return build_call_expr (fndecl, 2, narg0, narg1);
8397 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8398 if (BUILTIN_CBRT_P (fcode))
8400 tree arg = CALL_EXPR_ARG (arg0, 0);
8401 if (tree_expr_nonnegative_p (arg))
8403 const REAL_VALUE_TYPE dconstroot
8404 = real_value_truncate (TYPE_MODE (type), dconstthird);
8405 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8406 build_real (type, dconstroot));
8407 return build_call_expr (fndecl, 2, arg, narg1);
8411 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8412 if (fcode == BUILT_IN_POW
8413 || fcode == BUILT_IN_POWF
8414 || fcode == BUILT_IN_POWL)
8416 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8417 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8418 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8419 return build_call_expr (fndecl, 2, arg00, narg1);
8426 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8427 Return NULL_TREE if no simplification can be made. */
8429 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8430 tree arg0, tree arg1, tree type)
8432 if (!validate_arg (arg0, REAL_TYPE)
8433 || !validate_arg (arg1, INTEGER_TYPE))
8436 /* Optimize pow(1.0,y) = 1.0. */
8437 if (real_onep (arg0))
8438 return omit_one_operand (type, build_real (type, dconst1), arg1);
8440 if (host_integerp (arg1, 0))
8442 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8444 /* Evaluate powi at compile-time. */
8445 if (TREE_CODE (arg0) == REAL_CST
8446 && !TREE_OVERFLOW (arg0))
8449 x = TREE_REAL_CST (arg0);
8450 real_powi (&x, TYPE_MODE (type), &x, c);
8451 return build_real (type, x);
8454 /* Optimize pow(x,0) = 1.0. */
8456 return omit_one_operand (type, build_real (type, dconst1),
8459 /* Optimize pow(x,1) = x. */
8463 /* Optimize pow(x,-1) = 1.0/x. */
8465 return fold_build2 (RDIV_EXPR, type,
8466 build_real (type, dconst1), arg0);
8472 /* A subroutine of fold_builtin to fold the various exponent
8473 functions. Return NULL_TREE if no simplification can be made.
8474 FUNC is the corresponding MPFR exponent function. */
8477 fold_builtin_exponent (tree fndecl, tree arg,
8478 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8480 if (validate_arg (arg, REAL_TYPE))
8482 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8485 /* Calculate the result when the argument is a constant. */
8486 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8489 /* Optimize expN(logN(x)) = x. */
8490 if (flag_unsafe_math_optimizations)
8492 const enum built_in_function fcode = builtin_mathfn_code (arg);
8494 if ((func == mpfr_exp
8495 && (fcode == BUILT_IN_LOG
8496 || fcode == BUILT_IN_LOGF
8497 || fcode == BUILT_IN_LOGL))
8498 || (func == mpfr_exp2
8499 && (fcode == BUILT_IN_LOG2
8500 || fcode == BUILT_IN_LOG2F
8501 || fcode == BUILT_IN_LOG2L))
8502 || (func == mpfr_exp10
8503 && (fcode == BUILT_IN_LOG10
8504 || fcode == BUILT_IN_LOG10F
8505 || fcode == BUILT_IN_LOG10L)))
8506 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8513 /* Return true if VAR is a VAR_DECL or a component thereof. */
8516 var_decl_component_p (tree var)
8519 while (handled_component_p (inner))
8520 inner = TREE_OPERAND (inner, 0);
8521 return SSA_VAR_P (inner);
8524 /* Fold function call to builtin memset. Return
8525 NULL_TREE if no simplification can be made. */
8528 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8531 unsigned HOST_WIDE_INT length, cval;
8533 if (! validate_arg (dest, POINTER_TYPE)
8534 || ! validate_arg (c, INTEGER_TYPE)
8535 || ! validate_arg (len, INTEGER_TYPE))
8538 if (! host_integerp (len, 1))
8541 /* If the LEN parameter is zero, return DEST. */
8542 if (integer_zerop (len))
8543 return omit_one_operand (type, dest, c);
8545 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8550 if (TREE_CODE (var) != ADDR_EXPR)
8553 var = TREE_OPERAND (var, 0);
8554 if (TREE_THIS_VOLATILE (var))
8557 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8558 && !POINTER_TYPE_P (TREE_TYPE (var)))
8561 if (! var_decl_component_p (var))
8564 length = tree_low_cst (len, 1);
8565 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8566 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8570 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8573 if (integer_zerop (c))
8577 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8580 cval = tree_low_cst (c, 1);
8584 cval |= (cval << 31) << 1;
8587 ret = build_int_cst_type (TREE_TYPE (var), cval);
8588 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8592 return omit_one_operand (type, dest, ret);
8595 /* Fold function call to builtin memset. Return
8596 NULL_TREE if no simplification can be made. */
8599 fold_builtin_bzero (tree dest, tree size, bool ignore)
8601 if (! validate_arg (dest, POINTER_TYPE)
8602 || ! validate_arg (size, INTEGER_TYPE))
8608 /* New argument list transforming bzero(ptr x, int y) to
8609 memset(ptr x, int 0, size_t y). This is done this way
8610 so that if it isn't expanded inline, we fallback to
8611 calling bzero instead of memset. */
8613 return fold_builtin_memset (dest, integer_zero_node,
8614 fold_convert (sizetype, size),
8615 void_type_node, ignore);
8618 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8619 NULL_TREE if no simplification can be made.
8620 If ENDP is 0, return DEST (like memcpy).
8621 If ENDP is 1, return DEST+LEN (like mempcpy).
8622 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8623 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8627 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8629 tree destvar, srcvar, expr;
8631 if (! validate_arg (dest, POINTER_TYPE)
8632 || ! validate_arg (src, POINTER_TYPE)
8633 || ! validate_arg (len, INTEGER_TYPE))
8636 /* If the LEN parameter is zero, return DEST. */
8637 if (integer_zerop (len))
8638 return omit_one_operand (type, dest, src);
8640 /* If SRC and DEST are the same (and not volatile), return
8641 DEST{,+LEN,+LEN-1}. */
8642 if (operand_equal_p (src, dest, 0))
8646 tree srctype, desttype;
8649 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8650 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8652 /* Both DEST and SRC must be pointer types.
8653 ??? This is what old code did. Is the testing for pointer types
8656 If either SRC is readonly or length is 1, we can use memcpy. */
8657 if (dest_align && src_align
8658 && (readonly_data_expr (src)
8659 || (host_integerp (len, 1)
8660 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8661 tree_low_cst (len, 1)))))
8663 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8666 return build_call_expr (fn, 3, dest, src, len);
8671 if (!host_integerp (len, 0))
8674 This logic lose for arguments like (type *)malloc (sizeof (type)),
8675 since we strip the casts of up to VOID return value from malloc.
8676 Perhaps we ought to inherit type from non-VOID argument here? */
8679 srctype = TREE_TYPE (TREE_TYPE (src));
8680 desttype = TREE_TYPE (TREE_TYPE (dest));
8681 if (!srctype || !desttype
8682 || !TYPE_SIZE_UNIT (srctype)
8683 || !TYPE_SIZE_UNIT (desttype)
8684 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8685 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8686 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8687 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8690 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8691 < (int) TYPE_ALIGN (desttype)
8692 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8693 < (int) TYPE_ALIGN (srctype)))
8697 dest = builtin_save_expr (dest);
8699 srcvar = build_fold_indirect_ref (src);
8700 if (TREE_THIS_VOLATILE (srcvar))
8702 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8704 /* With memcpy, it is possible to bypass aliasing rules, so without
8705 this check i. e. execute/20060930-2.c would be misoptimized, because
8706 it use conflicting alias set to hold argument for the memcpy call.
8707 This check is probably unnecesary with -fno-strict-aliasing.
8708 Similarly for destvar. See also PR29286. */
8709 if (!var_decl_component_p (srcvar)
8710 /* Accept: memcpy (*char_var, "test", 1); that simplify
8712 || is_gimple_min_invariant (srcvar)
8713 || readonly_data_expr (src))
8716 destvar = build_fold_indirect_ref (dest);
8717 if (TREE_THIS_VOLATILE (destvar))
8719 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8721 if (!var_decl_component_p (destvar))
8724 if (srctype == desttype
8725 || (gimple_in_ssa_p (cfun)
8726 && useless_type_conversion_p (desttype, srctype)))
8728 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8729 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8730 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8731 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8732 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8734 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8735 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8741 if (endp == 0 || endp == 3)
8742 return omit_one_operand (type, dest, expr);
8748 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8751 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8752 dest = fold_convert (type, dest);
8754 dest = omit_one_operand (type, dest, expr);
8758 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8759 If LEN is not NULL, it represents the length of the string to be
8760 copied. Return NULL_TREE if no simplification can be made. */
8763 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8767 if (!validate_arg (dest, POINTER_TYPE)
8768 || !validate_arg (src, POINTER_TYPE))
8771 /* If SRC and DEST are the same (and not volatile), return DEST. */
8772 if (operand_equal_p (src, dest, 0))
8773 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8778 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8784 len = c_strlen (src, 1);
8785 if (! len || TREE_SIDE_EFFECTS (len))
8789 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8790 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8791 build_call_expr (fn, 3, dest, src, len));
8794 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8795 If SLEN is not NULL, it represents the length of the source string.
8796 Return NULL_TREE if no simplification can be made. */
8799 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8803 if (!validate_arg (dest, POINTER_TYPE)
8804 || !validate_arg (src, POINTER_TYPE)
8805 || !validate_arg (len, INTEGER_TYPE))
8808 /* If the LEN parameter is zero, return DEST. */
8809 if (integer_zerop (len))
8810 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8812 /* We can't compare slen with len as constants below if len is not a
8814 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8818 slen = c_strlen (src, 1);
8820 /* Now, we must be passed a constant src ptr parameter. */
8821 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8824 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8826 /* We do not support simplification of this case, though we do
8827 support it when expanding trees into RTL. */
8828 /* FIXME: generate a call to __builtin_memset. */
8829 if (tree_int_cst_lt (slen, len))
8832 /* OK transform into builtin memcpy. */
8833 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8836 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8837 build_call_expr (fn, 3, dest, src, len));
8840 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8841 arguments to the call, and TYPE is its return type.
8842 Return NULL_TREE if no simplification can be made. */
8845 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8847 if (!validate_arg (arg1, POINTER_TYPE)
8848 || !validate_arg (arg2, INTEGER_TYPE)
8849 || !validate_arg (len, INTEGER_TYPE))
8855 if (TREE_CODE (arg2) != INTEGER_CST
8856 || !host_integerp (len, 1))
8859 p1 = c_getstr (arg1);
8860 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8866 if (target_char_cast (arg2, &c))
8869 r = memchr (p1, c, tree_low_cst (len, 1));
8872 return build_int_cst (TREE_TYPE (arg1), 0);
8874 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8876 return fold_convert (type, tem);
8882 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8883 Return NULL_TREE if no simplification can be made. */
8886 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8888 const char *p1, *p2;
8890 if (!validate_arg (arg1, POINTER_TYPE)
8891 || !validate_arg (arg2, POINTER_TYPE)
8892 || !validate_arg (len, INTEGER_TYPE))
8895 /* If the LEN parameter is zero, return zero. */
8896 if (integer_zerop (len))
8897 return omit_two_operands (integer_type_node, integer_zero_node,
8900 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8901 if (operand_equal_p (arg1, arg2, 0))
8902 return omit_one_operand (integer_type_node, integer_zero_node, len);
8904 p1 = c_getstr (arg1);
8905 p2 = c_getstr (arg2);
8907 /* If all arguments are constant, and the value of len is not greater
8908 than the lengths of arg1 and arg2, evaluate at compile-time. */
8909 if (host_integerp (len, 1) && p1 && p2
8910 && compare_tree_int (len, strlen (p1) + 1) <= 0
8911 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8913 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8916 return integer_one_node;
8918 return integer_minus_one_node;
8920 return integer_zero_node;
8923 /* If len parameter is one, return an expression corresponding to
8924 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8925 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8927 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8928 tree cst_uchar_ptr_node
8929 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8931 tree ind1 = fold_convert (integer_type_node,
8932 build1 (INDIRECT_REF, cst_uchar_node,
8933 fold_convert (cst_uchar_ptr_node,
8935 tree ind2 = fold_convert (integer_type_node,
8936 build1 (INDIRECT_REF, cst_uchar_node,
8937 fold_convert (cst_uchar_ptr_node,
8939 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8945 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8946 Return NULL_TREE if no simplification can be made. */
8949 fold_builtin_strcmp (tree arg1, tree arg2)
8951 const char *p1, *p2;
8953 if (!validate_arg (arg1, POINTER_TYPE)
8954 || !validate_arg (arg2, POINTER_TYPE))
8957 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8958 if (operand_equal_p (arg1, arg2, 0))
8959 return integer_zero_node;
8961 p1 = c_getstr (arg1);
8962 p2 = c_getstr (arg2);
8966 const int i = strcmp (p1, p2);
8968 return integer_minus_one_node;
8970 return integer_one_node;
8972 return integer_zero_node;
8975 /* If the second arg is "", return *(const unsigned char*)arg1. */
8976 if (p2 && *p2 == '\0')
8978 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8979 tree cst_uchar_ptr_node
8980 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8982 return fold_convert (integer_type_node,
8983 build1 (INDIRECT_REF, cst_uchar_node,
8984 fold_convert (cst_uchar_ptr_node,
8988 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8989 if (p1 && *p1 == '\0')
8991 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8992 tree cst_uchar_ptr_node
8993 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8995 tree temp = fold_convert (integer_type_node,
8996 build1 (INDIRECT_REF, cst_uchar_node,
8997 fold_convert (cst_uchar_ptr_node,
8999 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9005 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9006 Return NULL_TREE if no simplification can be made. */
9009 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9011 const char *p1, *p2;
9013 if (!validate_arg (arg1, POINTER_TYPE)
9014 || !validate_arg (arg2, POINTER_TYPE)
9015 || !validate_arg (len, INTEGER_TYPE))
9018 /* If the LEN parameter is zero, return zero. */
9019 if (integer_zerop (len))
9020 return omit_two_operands (integer_type_node, integer_zero_node,
9023 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9024 if (operand_equal_p (arg1, arg2, 0))
9025 return omit_one_operand (integer_type_node, integer_zero_node, len);
9027 p1 = c_getstr (arg1);
9028 p2 = c_getstr (arg2);
9030 if (host_integerp (len, 1) && p1 && p2)
9032 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9034 return integer_one_node;
9036 return integer_minus_one_node;
9038 return integer_zero_node;
9041 /* If the second arg is "", and the length is greater than zero,
9042 return *(const unsigned char*)arg1. */
9043 if (p2 && *p2 == '\0'
9044 && TREE_CODE (len) == INTEGER_CST
9045 && tree_int_cst_sgn (len) == 1)
9047 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9048 tree cst_uchar_ptr_node
9049 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9051 return fold_convert (integer_type_node,
9052 build1 (INDIRECT_REF, cst_uchar_node,
9053 fold_convert (cst_uchar_ptr_node,
9057 /* If the first arg is "", and the length is greater than zero,
9058 return -*(const unsigned char*)arg2. */
9059 if (p1 && *p1 == '\0'
9060 && TREE_CODE (len) == INTEGER_CST
9061 && tree_int_cst_sgn (len) == 1)
9063 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9064 tree cst_uchar_ptr_node
9065 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9067 tree temp = fold_convert (integer_type_node,
9068 build1 (INDIRECT_REF, cst_uchar_node,
9069 fold_convert (cst_uchar_ptr_node,
9071 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9074 /* If len parameter is one, return an expression corresponding to
9075 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9076 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9078 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9079 tree cst_uchar_ptr_node
9080 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9082 tree ind1 = fold_convert (integer_type_node,
9083 build1 (INDIRECT_REF, cst_uchar_node,
9084 fold_convert (cst_uchar_ptr_node,
9086 tree ind2 = fold_convert (integer_type_node,
9087 build1 (INDIRECT_REF, cst_uchar_node,
9088 fold_convert (cst_uchar_ptr_node,
9090 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9096 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9097 ARG. Return NULL_TREE if no simplification can be made. */
9100 fold_builtin_signbit (tree arg, tree type)
9104 if (!validate_arg (arg, REAL_TYPE))
9107 /* If ARG is a compile-time constant, determine the result. */
9108 if (TREE_CODE (arg) == REAL_CST
9109 && !TREE_OVERFLOW (arg))
9113 c = TREE_REAL_CST (arg);
9114 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9115 return fold_convert (type, temp);
9118 /* If ARG is non-negative, the result is always zero. */
9119 if (tree_expr_nonnegative_p (arg))
9120 return omit_one_operand (type, integer_zero_node, arg);
9122 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9123 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9124 return fold_build2 (LT_EXPR, type, arg,
9125 build_real (TREE_TYPE (arg), dconst0));
9130 /* Fold function call to builtin copysign, copysignf or copysignl with
9131 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9135 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9139 if (!validate_arg (arg1, REAL_TYPE)
9140 || !validate_arg (arg2, REAL_TYPE))
9143 /* copysign(X,X) is X. */
9144 if (operand_equal_p (arg1, arg2, 0))
9145 return fold_convert (type, arg1);
9147 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9148 if (TREE_CODE (arg1) == REAL_CST
9149 && TREE_CODE (arg2) == REAL_CST
9150 && !TREE_OVERFLOW (arg1)
9151 && !TREE_OVERFLOW (arg2))
9153 REAL_VALUE_TYPE c1, c2;
9155 c1 = TREE_REAL_CST (arg1);
9156 c2 = TREE_REAL_CST (arg2);
9157 /* c1.sign := c2.sign. */
9158 real_copysign (&c1, &c2);
9159 return build_real (type, c1);
9162 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9163 Remember to evaluate Y for side-effects. */
9164 if (tree_expr_nonnegative_p (arg2))
9165 return omit_one_operand (type,
9166 fold_build1 (ABS_EXPR, type, arg1),
9169 /* Strip sign changing operations for the first argument. */
9170 tem = fold_strip_sign_ops (arg1);
9172 return build_call_expr (fndecl, 2, tem, arg2);
9177 /* Fold a call to builtin isascii with argument ARG. */
9180 fold_builtin_isascii (tree arg)
9182 if (!validate_arg (arg, INTEGER_TYPE))
9186 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9187 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9188 build_int_cst (NULL_TREE,
9189 ~ (unsigned HOST_WIDE_INT) 0x7f));
9190 return fold_build2 (EQ_EXPR, integer_type_node,
9191 arg, integer_zero_node);
9195 /* Fold a call to builtin toascii with argument ARG. */
9198 fold_builtin_toascii (tree arg)
9200 if (!validate_arg (arg, INTEGER_TYPE))
9203 /* Transform toascii(c) -> (c & 0x7f). */
9204 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9205 build_int_cst (NULL_TREE, 0x7f));
9208 /* Fold a call to builtin isdigit with argument ARG. */
9211 fold_builtin_isdigit (tree arg)
9213 if (!validate_arg (arg, INTEGER_TYPE))
9217 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9218 /* According to the C standard, isdigit is unaffected by locale.
9219 However, it definitely is affected by the target character set. */
9220 unsigned HOST_WIDE_INT target_digit0
9221 = lang_hooks.to_target_charset ('0');
9223 if (target_digit0 == 0)
9226 arg = fold_convert (unsigned_type_node, arg);
9227 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9228 build_int_cst (unsigned_type_node, target_digit0));
9229 return fold_build2 (LE_EXPR, integer_type_node, arg,
9230 build_int_cst (unsigned_type_node, 9));
9234 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9237 fold_builtin_fabs (tree arg, tree type)
9239 if (!validate_arg (arg, REAL_TYPE))
9242 arg = fold_convert (type, arg);
9243 if (TREE_CODE (arg) == REAL_CST)
9244 return fold_abs_const (arg, type);
9245 return fold_build1 (ABS_EXPR, type, arg);
9248 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9251 fold_builtin_abs (tree arg, tree type)
9253 if (!validate_arg (arg, INTEGER_TYPE))
9256 arg = fold_convert (type, arg);
9257 if (TREE_CODE (arg) == INTEGER_CST)
9258 return fold_abs_const (arg, type);
9259 return fold_build1 (ABS_EXPR, type, arg);
9262 /* Fold a call to builtin fmin or fmax. */
9265 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9267 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9269 /* Calculate the result when the argument is a constant. */
9270 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9275 /* If either argument is NaN, return the other one. Avoid the
9276 transformation if we get (and honor) a signalling NaN. Using
9277 omit_one_operand() ensures we create a non-lvalue. */
9278 if (TREE_CODE (arg0) == REAL_CST
9279 && real_isnan (&TREE_REAL_CST (arg0))
9280 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9281 || ! TREE_REAL_CST (arg0).signalling))
9282 return omit_one_operand (type, arg1, arg0);
9283 if (TREE_CODE (arg1) == REAL_CST
9284 && real_isnan (&TREE_REAL_CST (arg1))
9285 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9286 || ! TREE_REAL_CST (arg1).signalling))
9287 return omit_one_operand (type, arg0, arg1);
9289 /* Transform fmin/fmax(x,x) -> x. */
9290 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9291 return omit_one_operand (type, arg0, arg1);
9293 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9294 functions to return the numeric arg if the other one is NaN.
9295 These tree codes don't honor that, so only transform if
9296 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9297 handled, so we don't have to worry about it either. */
9298 if (flag_finite_math_only)
9299 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9300 fold_convert (type, arg0),
9301 fold_convert (type, arg1));
9306 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9309 fold_builtin_carg (tree arg, tree type)
9311 if (validate_arg (arg, COMPLEX_TYPE))
9313 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9317 tree new_arg = builtin_save_expr (arg);
9318 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9319 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9320 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9327 /* Fold a call to builtin logb/ilogb. */
9330 fold_builtin_logb (tree arg, tree rettype)
9332 if (! validate_arg (arg, REAL_TYPE))
9337 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9339 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9345 /* If arg is Inf or NaN and we're logb, return it. */
9346 if (TREE_CODE (rettype) == REAL_TYPE)
9347 return fold_convert (rettype, arg);
9348 /* Fall through... */
9350 /* Zero may set errno and/or raise an exception for logb, also
9351 for ilogb we don't know FP_ILOGB0. */
9354 /* For normal numbers, proceed iff radix == 2. In GCC,
9355 normalized significands are in the range [0.5, 1.0). We
9356 want the exponent as if they were [1.0, 2.0) so get the
9357 exponent and subtract 1. */
9358 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9359 return fold_convert (rettype, build_int_cst (NULL_TREE,
9360 REAL_EXP (value)-1));
9368 /* Fold a call to builtin significand, if radix == 2. */
9371 fold_builtin_significand (tree arg, tree rettype)
9373 if (! validate_arg (arg, REAL_TYPE))
9378 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9380 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9387 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9388 return fold_convert (rettype, arg);
9390 /* For normal numbers, proceed iff radix == 2. */
9391 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9393 REAL_VALUE_TYPE result = *value;
9394 /* In GCC, normalized significands are in the range [0.5,
9395 1.0). We want them to be [1.0, 2.0) so set the
9397 SET_REAL_EXP (&result, 1);
9398 return build_real (rettype, result);
9407 /* Fold a call to builtin frexp, we can assume the base is 2. */
9410 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9412 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9417 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9420 arg1 = build_fold_indirect_ref (arg1);
9422 /* Proceed if a valid pointer type was passed in. */
9423 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9425 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9431 /* For +-0, return (*exp = 0, +-0). */
9432 exp = integer_zero_node;
9437 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9438 return omit_one_operand (rettype, arg0, arg1);
9441 /* Since the frexp function always expects base 2, and in
9442 GCC normalized significands are already in the range
9443 [0.5, 1.0), we have exactly what frexp wants. */
9444 REAL_VALUE_TYPE frac_rvt = *value;
9445 SET_REAL_EXP (&frac_rvt, 0);
9446 frac = build_real (rettype, frac_rvt);
9447 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9454 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9455 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9456 TREE_SIDE_EFFECTS (arg1) = 1;
9457 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9463 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9464 then we can assume the base is two. If it's false, then we have to
9465 check the mode of the TYPE parameter in certain cases. */
9468 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9470 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9475 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9476 if (real_zerop (arg0) || integer_zerop (arg1)
9477 || (TREE_CODE (arg0) == REAL_CST
9478 && !real_isfinite (&TREE_REAL_CST (arg0))))
9479 return omit_one_operand (type, arg0, arg1);
9481 /* If both arguments are constant, then try to evaluate it. */
9482 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9483 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9484 && host_integerp (arg1, 0))
9486 /* Bound the maximum adjustment to twice the range of the
9487 mode's valid exponents. Use abs to ensure the range is
9488 positive as a sanity check. */
9489 const long max_exp_adj = 2 *
9490 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9491 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9493 /* Get the user-requested adjustment. */
9494 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9496 /* The requested adjustment must be inside this range. This
9497 is a preliminary cap to avoid things like overflow, we
9498 may still fail to compute the result for other reasons. */
9499 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9501 REAL_VALUE_TYPE initial_result;
9503 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9505 /* Ensure we didn't overflow. */
9506 if (! real_isinf (&initial_result))
9508 const REAL_VALUE_TYPE trunc_result
9509 = real_value_truncate (TYPE_MODE (type), initial_result);
9511 /* Only proceed if the target mode can hold the
9513 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9514 return build_real (type, trunc_result);
9523 /* Fold a call to builtin modf. */
9526 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9528 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9533 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9536 arg1 = build_fold_indirect_ref (arg1);
9538 /* Proceed if a valid pointer type was passed in. */
9539 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9541 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9542 REAL_VALUE_TYPE trunc, frac;
9548 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9549 trunc = frac = *value;
9552 /* For +-Inf, return (*arg1 = arg0, +-0). */
9554 frac.sign = value->sign;
9558 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9559 real_trunc (&trunc, VOIDmode, value);
9560 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9561 /* If the original number was negative and already
9562 integral, then the fractional part is -0.0. */
9563 if (value->sign && frac.cl == rvc_zero)
9564 frac.sign = value->sign;
9568 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9569 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9570 build_real (rettype, trunc));
9571 TREE_SIDE_EFFECTS (arg1) = 1;
9572 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9573 build_real (rettype, frac));
9579 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9580 ARG is the argument for the call. */
9583 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9585 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9588 if (!validate_arg (arg, REAL_TYPE))
9590 error ("non-floating-point argument to function %qs",
9591 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9592 return error_mark_node;
9595 switch (builtin_index)
9597 case BUILT_IN_ISINF:
9598 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9599 return omit_one_operand (type, integer_zero_node, arg);
9601 if (TREE_CODE (arg) == REAL_CST)
9603 r = TREE_REAL_CST (arg);
9604 if (real_isinf (&r))
9605 return real_compare (GT_EXPR, &r, &dconst0)
9606 ? integer_one_node : integer_minus_one_node;
9608 return integer_zero_node;
9613 case BUILT_IN_ISFINITE:
9614 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9615 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9616 return omit_one_operand (type, integer_one_node, arg);
9618 if (TREE_CODE (arg) == REAL_CST)
9620 r = TREE_REAL_CST (arg);
9621 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9626 case BUILT_IN_ISNAN:
9627 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9628 return omit_one_operand (type, integer_zero_node, arg);
9630 if (TREE_CODE (arg) == REAL_CST)
9632 r = TREE_REAL_CST (arg);
9633 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9636 arg = builtin_save_expr (arg);
9637 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9644 /* Fold a call to an unordered comparison function such as
9645 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9646 being called and ARG0 and ARG1 are the arguments for the call.
9647 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9648 the opposite of the desired result. UNORDERED_CODE is used
9649 for modes that can hold NaNs and ORDERED_CODE is used for
9653 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9654 enum tree_code unordered_code,
9655 enum tree_code ordered_code)
9657 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9658 enum tree_code code;
9660 enum tree_code code0, code1;
9661 tree cmp_type = NULL_TREE;
9663 type0 = TREE_TYPE (arg0);
9664 type1 = TREE_TYPE (arg1);
9666 code0 = TREE_CODE (type0);
9667 code1 = TREE_CODE (type1);
9669 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9670 /* Choose the wider of two real types. */
9671 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9673 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9675 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9679 error ("non-floating-point argument to function %qs",
9680 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9681 return error_mark_node;
9684 arg0 = fold_convert (cmp_type, arg0);
9685 arg1 = fold_convert (cmp_type, arg1);
9687 if (unordered_code == UNORDERED_EXPR)
9689 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9690 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9691 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9694 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9696 return fold_build1 (TRUTH_NOT_EXPR, type,
9697 fold_build2 (code, type, arg0, arg1));
9700 /* Fold a call to built-in function FNDECL with 0 arguments.
9701 IGNORE is true if the result of the function call is ignored. This
9702 function returns NULL_TREE if no simplification was possible. */
9705 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9707 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9708 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9711 CASE_FLT_FN (BUILT_IN_INF):
9712 case BUILT_IN_INFD32:
9713 case BUILT_IN_INFD64:
9714 case BUILT_IN_INFD128:
9715 return fold_builtin_inf (type, true);
9717 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9718 return fold_builtin_inf (type, false);
9720 case BUILT_IN_CLASSIFY_TYPE:
9721 return fold_builtin_classify_type (NULL_TREE);
9729 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9730 IGNORE is true if the result of the function call is ignored. This
9731 function returns NULL_TREE if no simplification was possible. */
9734 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9736 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9737 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9741 case BUILT_IN_CONSTANT_P:
9743 tree val = fold_builtin_constant_p (arg0);
9745 /* Gimplification will pull the CALL_EXPR for the builtin out of
9746 an if condition. When not optimizing, we'll not CSE it back.
9747 To avoid link error types of regressions, return false now. */
9748 if (!val && !optimize)
9749 val = integer_zero_node;
9754 case BUILT_IN_CLASSIFY_TYPE:
9755 return fold_builtin_classify_type (arg0);
9757 case BUILT_IN_STRLEN:
9758 return fold_builtin_strlen (arg0);
9760 CASE_FLT_FN (BUILT_IN_FABS):
9761 return fold_builtin_fabs (arg0, type);
9765 case BUILT_IN_LLABS:
9766 case BUILT_IN_IMAXABS:
9767 return fold_builtin_abs (arg0, type);
9769 CASE_FLT_FN (BUILT_IN_CONJ):
9770 if (validate_arg (arg0, COMPLEX_TYPE))
9771 return fold_build1 (CONJ_EXPR, type, arg0);
9774 CASE_FLT_FN (BUILT_IN_CREAL):
9775 if (validate_arg (arg0, COMPLEX_TYPE))
9776 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9779 CASE_FLT_FN (BUILT_IN_CIMAG):
9780 if (validate_arg (arg0, COMPLEX_TYPE))
9781 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9784 CASE_FLT_FN (BUILT_IN_CCOS):
9785 CASE_FLT_FN (BUILT_IN_CCOSH):
9786 /* These functions are "even", i.e. f(x) == f(-x). */
9787 if (validate_arg (arg0, COMPLEX_TYPE))
9789 tree narg = fold_strip_sign_ops (arg0);
9791 return build_call_expr (fndecl, 1, narg);
9795 CASE_FLT_FN (BUILT_IN_CABS):
9796 return fold_builtin_cabs (arg0, type, fndecl);
9798 CASE_FLT_FN (BUILT_IN_CARG):
9799 return fold_builtin_carg (arg0, type);
9801 CASE_FLT_FN (BUILT_IN_SQRT):
9802 return fold_builtin_sqrt (arg0, type);
9804 CASE_FLT_FN (BUILT_IN_CBRT):
9805 return fold_builtin_cbrt (arg0, type);
9807 CASE_FLT_FN (BUILT_IN_ASIN):
9808 if (validate_arg (arg0, REAL_TYPE))
9809 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9810 &dconstm1, &dconst1, true);
9813 CASE_FLT_FN (BUILT_IN_ACOS):
9814 if (validate_arg (arg0, REAL_TYPE))
9815 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9816 &dconstm1, &dconst1, true);
9819 CASE_FLT_FN (BUILT_IN_ATAN):
9820 if (validate_arg (arg0, REAL_TYPE))
9821 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9824 CASE_FLT_FN (BUILT_IN_ASINH):
9825 if (validate_arg (arg0, REAL_TYPE))
9826 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9829 CASE_FLT_FN (BUILT_IN_ACOSH):
9830 if (validate_arg (arg0, REAL_TYPE))
9831 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9832 &dconst1, NULL, true);
9835 CASE_FLT_FN (BUILT_IN_ATANH):
9836 if (validate_arg (arg0, REAL_TYPE))
9837 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9838 &dconstm1, &dconst1, false);
9841 CASE_FLT_FN (BUILT_IN_SIN):
9842 if (validate_arg (arg0, REAL_TYPE))
9843 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9846 CASE_FLT_FN (BUILT_IN_COS):
9847 return fold_builtin_cos (arg0, type, fndecl);
9850 CASE_FLT_FN (BUILT_IN_TAN):
9851 return fold_builtin_tan (arg0, type);
9853 CASE_FLT_FN (BUILT_IN_CEXP):
9854 return fold_builtin_cexp (arg0, type);
9856 CASE_FLT_FN (BUILT_IN_CEXPI):
9857 if (validate_arg (arg0, REAL_TYPE))
9858 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9861 CASE_FLT_FN (BUILT_IN_SINH):
9862 if (validate_arg (arg0, REAL_TYPE))
9863 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9866 CASE_FLT_FN (BUILT_IN_COSH):
9867 return fold_builtin_cosh (arg0, type, fndecl);
9869 CASE_FLT_FN (BUILT_IN_TANH):
9870 if (validate_arg (arg0, REAL_TYPE))
9871 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9874 CASE_FLT_FN (BUILT_IN_ERF):
9875 if (validate_arg (arg0, REAL_TYPE))
9876 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9879 CASE_FLT_FN (BUILT_IN_ERFC):
9880 if (validate_arg (arg0, REAL_TYPE))
9881 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9884 CASE_FLT_FN (BUILT_IN_TGAMMA):
9885 if (validate_arg (arg0, REAL_TYPE))
9886 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9889 CASE_FLT_FN (BUILT_IN_EXP):
9890 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9892 CASE_FLT_FN (BUILT_IN_EXP2):
9893 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9895 CASE_FLT_FN (BUILT_IN_EXP10):
9896 CASE_FLT_FN (BUILT_IN_POW10):
9897 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9899 CASE_FLT_FN (BUILT_IN_EXPM1):
9900 if (validate_arg (arg0, REAL_TYPE))
9901 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9904 CASE_FLT_FN (BUILT_IN_LOG):
9905 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9907 CASE_FLT_FN (BUILT_IN_LOG2):
9908 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9910 CASE_FLT_FN (BUILT_IN_LOG10):
9911 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9913 CASE_FLT_FN (BUILT_IN_LOG1P):
9914 if (validate_arg (arg0, REAL_TYPE))
9915 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9916 &dconstm1, NULL, false);
9919 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9920 CASE_FLT_FN (BUILT_IN_J0):
9921 if (validate_arg (arg0, REAL_TYPE))
9922 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9926 CASE_FLT_FN (BUILT_IN_J1):
9927 if (validate_arg (arg0, REAL_TYPE))
9928 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9932 CASE_FLT_FN (BUILT_IN_Y0):
9933 if (validate_arg (arg0, REAL_TYPE))
9934 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9935 &dconst0, NULL, false);
9938 CASE_FLT_FN (BUILT_IN_Y1):
9939 if (validate_arg (arg0, REAL_TYPE))
9940 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9941 &dconst0, NULL, false);
9945 CASE_FLT_FN (BUILT_IN_NAN):
9946 case BUILT_IN_NAND32:
9947 case BUILT_IN_NAND64:
9948 case BUILT_IN_NAND128:
9949 return fold_builtin_nan (arg0, type, true);
9951 CASE_FLT_FN (BUILT_IN_NANS):
9952 return fold_builtin_nan (arg0, type, false);
9954 CASE_FLT_FN (BUILT_IN_FLOOR):
9955 return fold_builtin_floor (fndecl, arg0);
9957 CASE_FLT_FN (BUILT_IN_CEIL):
9958 return fold_builtin_ceil (fndecl, arg0);
9960 CASE_FLT_FN (BUILT_IN_TRUNC):
9961 return fold_builtin_trunc (fndecl, arg0);
9963 CASE_FLT_FN (BUILT_IN_ROUND):
9964 return fold_builtin_round (fndecl, arg0);
9966 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9967 CASE_FLT_FN (BUILT_IN_RINT):
9968 return fold_trunc_transparent_mathfn (fndecl, arg0);
9970 CASE_FLT_FN (BUILT_IN_LCEIL):
9971 CASE_FLT_FN (BUILT_IN_LLCEIL):
9972 CASE_FLT_FN (BUILT_IN_LFLOOR):
9973 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9974 CASE_FLT_FN (BUILT_IN_LROUND):
9975 CASE_FLT_FN (BUILT_IN_LLROUND):
9976 return fold_builtin_int_roundingfn (fndecl, arg0);
9978 CASE_FLT_FN (BUILT_IN_LRINT):
9979 CASE_FLT_FN (BUILT_IN_LLRINT):
9980 return fold_fixed_mathfn (fndecl, arg0);
9982 case BUILT_IN_BSWAP32:
9983 case BUILT_IN_BSWAP64:
9984 return fold_builtin_bswap (fndecl, arg0);
9986 CASE_INT_FN (BUILT_IN_FFS):
9987 CASE_INT_FN (BUILT_IN_CLZ):
9988 CASE_INT_FN (BUILT_IN_CTZ):
9989 CASE_INT_FN (BUILT_IN_POPCOUNT):
9990 CASE_INT_FN (BUILT_IN_PARITY):
9991 return fold_builtin_bitop (fndecl, arg0);
9993 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9994 return fold_builtin_signbit (arg0, type);
9996 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9997 return fold_builtin_significand (arg0, type);
9999 CASE_FLT_FN (BUILT_IN_ILOGB):
10000 CASE_FLT_FN (BUILT_IN_LOGB):
10001 return fold_builtin_logb (arg0, type);
10003 case BUILT_IN_ISASCII:
10004 return fold_builtin_isascii (arg0);
10006 case BUILT_IN_TOASCII:
10007 return fold_builtin_toascii (arg0);
10009 case BUILT_IN_ISDIGIT:
10010 return fold_builtin_isdigit (arg0);
10012 CASE_FLT_FN (BUILT_IN_FINITE):
10013 case BUILT_IN_FINITED32:
10014 case BUILT_IN_FINITED64:
10015 case BUILT_IN_FINITED128:
10016 case BUILT_IN_ISFINITE:
10017 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10019 CASE_FLT_FN (BUILT_IN_ISINF):
10020 case BUILT_IN_ISINFD32:
10021 case BUILT_IN_ISINFD64:
10022 case BUILT_IN_ISINFD128:
10023 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10025 CASE_FLT_FN (BUILT_IN_ISNAN):
10026 case BUILT_IN_ISNAND32:
10027 case BUILT_IN_ISNAND64:
10028 case BUILT_IN_ISNAND128:
10029 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10031 case BUILT_IN_PRINTF:
10032 case BUILT_IN_PRINTF_UNLOCKED:
10033 case BUILT_IN_VPRINTF:
10034 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10044 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10045 IGNORE is true if the result of the function call is ignored. This
10046 function returns NULL_TREE if no simplification was possible. */
10049 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10051 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10052 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10056 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10057 CASE_FLT_FN (BUILT_IN_JN):
10058 if (validate_arg (arg0, INTEGER_TYPE)
10059 && validate_arg (arg1, REAL_TYPE))
10060 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10063 CASE_FLT_FN (BUILT_IN_YN):
10064 if (validate_arg (arg0, INTEGER_TYPE)
10065 && validate_arg (arg1, REAL_TYPE))
10066 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10070 CASE_FLT_FN (BUILT_IN_DREM):
10071 CASE_FLT_FN (BUILT_IN_REMAINDER):
10072 if (validate_arg (arg0, REAL_TYPE)
10073 && validate_arg(arg1, REAL_TYPE))
10074 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10077 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10078 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10079 if (validate_arg (arg0, REAL_TYPE)
10080 && validate_arg(arg1, POINTER_TYPE))
10081 return do_mpfr_lgamma_r (arg0, arg1, type);
10085 CASE_FLT_FN (BUILT_IN_ATAN2):
10086 if (validate_arg (arg0, REAL_TYPE)
10087 && validate_arg(arg1, REAL_TYPE))
10088 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10091 CASE_FLT_FN (BUILT_IN_FDIM):
10092 if (validate_arg (arg0, REAL_TYPE)
10093 && validate_arg(arg1, REAL_TYPE))
10094 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10097 CASE_FLT_FN (BUILT_IN_HYPOT):
10098 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10100 CASE_FLT_FN (BUILT_IN_LDEXP):
10101 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10102 CASE_FLT_FN (BUILT_IN_SCALBN):
10103 CASE_FLT_FN (BUILT_IN_SCALBLN):
10104 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10106 CASE_FLT_FN (BUILT_IN_FREXP):
10107 return fold_builtin_frexp (arg0, arg1, type);
10109 CASE_FLT_FN (BUILT_IN_MODF):
10110 return fold_builtin_modf (arg0, arg1, type);
10112 case BUILT_IN_BZERO:
10113 return fold_builtin_bzero (arg0, arg1, ignore);
10115 case BUILT_IN_FPUTS:
10116 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10118 case BUILT_IN_FPUTS_UNLOCKED:
10119 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10121 case BUILT_IN_STRSTR:
10122 return fold_builtin_strstr (arg0, arg1, type);
10124 case BUILT_IN_STRCAT:
10125 return fold_builtin_strcat (arg0, arg1);
10127 case BUILT_IN_STRSPN:
10128 return fold_builtin_strspn (arg0, arg1);
10130 case BUILT_IN_STRCSPN:
10131 return fold_builtin_strcspn (arg0, arg1);
10133 case BUILT_IN_STRCHR:
10134 case BUILT_IN_INDEX:
10135 return fold_builtin_strchr (arg0, arg1, type);
10137 case BUILT_IN_STRRCHR:
10138 case BUILT_IN_RINDEX:
10139 return fold_builtin_strrchr (arg0, arg1, type);
10141 case BUILT_IN_STRCPY:
10142 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10144 case BUILT_IN_STRCMP:
10145 return fold_builtin_strcmp (arg0, arg1);
10147 case BUILT_IN_STRPBRK:
10148 return fold_builtin_strpbrk (arg0, arg1, type);
10150 case BUILT_IN_EXPECT:
10151 return fold_builtin_expect (arg0);
10153 CASE_FLT_FN (BUILT_IN_POW):
10154 return fold_builtin_pow (fndecl, arg0, arg1, type);
10156 CASE_FLT_FN (BUILT_IN_POWI):
10157 return fold_builtin_powi (fndecl, arg0, arg1, type);
10159 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10160 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10162 CASE_FLT_FN (BUILT_IN_FMIN):
10163 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10165 CASE_FLT_FN (BUILT_IN_FMAX):
10166 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10168 case BUILT_IN_ISGREATER:
10169 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10170 case BUILT_IN_ISGREATEREQUAL:
10171 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10172 case BUILT_IN_ISLESS:
10173 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10174 case BUILT_IN_ISLESSEQUAL:
10175 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10176 case BUILT_IN_ISLESSGREATER:
10177 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10178 case BUILT_IN_ISUNORDERED:
10179 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10182 /* We do the folding for va_start in the expander. */
10183 case BUILT_IN_VA_START:
10186 case BUILT_IN_SPRINTF:
10187 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10189 case BUILT_IN_OBJECT_SIZE:
10190 return fold_builtin_object_size (arg0, arg1);
10192 case BUILT_IN_PRINTF:
10193 case BUILT_IN_PRINTF_UNLOCKED:
10194 case BUILT_IN_VPRINTF:
10195 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10197 case BUILT_IN_PRINTF_CHK:
10198 case BUILT_IN_VPRINTF_CHK:
10199 if (!validate_arg (arg0, INTEGER_TYPE)
10200 || TREE_SIDE_EFFECTS (arg0))
10203 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10206 case BUILT_IN_FPRINTF:
10207 case BUILT_IN_FPRINTF_UNLOCKED:
10208 case BUILT_IN_VFPRINTF:
10209 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10218 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10219 and ARG2. IGNORE is true if the result of the function call is ignored.
10220 This function returns NULL_TREE if no simplification was possible. */
10223 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10225 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10226 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10230 CASE_FLT_FN (BUILT_IN_SINCOS):
10231 return fold_builtin_sincos (arg0, arg1, arg2);
10233 CASE_FLT_FN (BUILT_IN_FMA):
10234 if (validate_arg (arg0, REAL_TYPE)
10235 && validate_arg(arg1, REAL_TYPE)
10236 && validate_arg(arg2, REAL_TYPE))
10237 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10240 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10241 CASE_FLT_FN (BUILT_IN_REMQUO):
10242 if (validate_arg (arg0, REAL_TYPE)
10243 && validate_arg(arg1, REAL_TYPE)
10244 && validate_arg(arg2, POINTER_TYPE))
10245 return do_mpfr_remquo (arg0, arg1, arg2);
10249 case BUILT_IN_MEMSET:
10250 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10252 case BUILT_IN_BCOPY:
10253 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10255 case BUILT_IN_MEMCPY:
10256 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10258 case BUILT_IN_MEMPCPY:
10259 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10261 case BUILT_IN_MEMMOVE:
10262 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10264 case BUILT_IN_STRNCAT:
10265 return fold_builtin_strncat (arg0, arg1, arg2);
10267 case BUILT_IN_STRNCPY:
10268 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10270 case BUILT_IN_STRNCMP:
10271 return fold_builtin_strncmp (arg0, arg1, arg2);
10273 case BUILT_IN_MEMCHR:
10274 return fold_builtin_memchr (arg0, arg1, arg2, type);
10276 case BUILT_IN_BCMP:
10277 case BUILT_IN_MEMCMP:
10278 return fold_builtin_memcmp (arg0, arg1, arg2);;
10280 case BUILT_IN_SPRINTF:
10281 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10283 case BUILT_IN_STRCPY_CHK:
10284 case BUILT_IN_STPCPY_CHK:
10285 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10288 case BUILT_IN_STRCAT_CHK:
10289 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10291 case BUILT_IN_PRINTF_CHK:
10292 case BUILT_IN_VPRINTF_CHK:
10293 if (!validate_arg (arg0, INTEGER_TYPE)
10294 || TREE_SIDE_EFFECTS (arg0))
10297 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10300 case BUILT_IN_FPRINTF:
10301 case BUILT_IN_FPRINTF_UNLOCKED:
10302 case BUILT_IN_VFPRINTF:
10303 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10305 case BUILT_IN_FPRINTF_CHK:
10306 case BUILT_IN_VFPRINTF_CHK:
10307 if (!validate_arg (arg1, INTEGER_TYPE)
10308 || TREE_SIDE_EFFECTS (arg1))
10311 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10320 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10321 ARG2, and ARG3. IGNORE is true if the result of the function call is
10322 ignored. This function returns NULL_TREE if no simplification was
10326 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10329 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10333 case BUILT_IN_MEMCPY_CHK:
10334 case BUILT_IN_MEMPCPY_CHK:
10335 case BUILT_IN_MEMMOVE_CHK:
10336 case BUILT_IN_MEMSET_CHK:
10337 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10339 DECL_FUNCTION_CODE (fndecl));
10341 case BUILT_IN_STRNCPY_CHK:
10342 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10344 case BUILT_IN_STRNCAT_CHK:
10345 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10347 case BUILT_IN_FPRINTF_CHK:
10348 case BUILT_IN_VFPRINTF_CHK:
10349 if (!validate_arg (arg1, INTEGER_TYPE)
10350 || TREE_SIDE_EFFECTS (arg1))
10353 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10363 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10364 arguments, where NARGS <= 4. IGNORE is true if the result of the
10365 function call is ignored. This function returns NULL_TREE if no
10366 simplification was possible. Note that this only folds builtins with
10367 fixed argument patterns. Foldings that do varargs-to-varargs
10368 transformations, or that match calls with more than 4 arguments,
10369 need to be handled with fold_builtin_varargs instead. */
10371 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10374 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10376 tree ret = NULL_TREE;
10380 ret = fold_builtin_0 (fndecl, ignore);
10383 ret = fold_builtin_1 (fndecl, args[0], ignore);
10386 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10389 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10392 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10400 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10401 TREE_NO_WARNING (ret) = 1;
10407 /* Builtins with folding operations that operate on "..." arguments
10408 need special handling; we need to store the arguments in a convenient
10409 data structure before attempting any folding. Fortunately there are
10410 only a few builtins that fall into this category. FNDECL is the
10411 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10412 result of the function call is ignored. */
10415 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10417 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10418 tree ret = NULL_TREE;
10422 case BUILT_IN_SPRINTF_CHK:
10423 case BUILT_IN_VSPRINTF_CHK:
10424 ret = fold_builtin_sprintf_chk (exp, fcode);
10427 case BUILT_IN_SNPRINTF_CHK:
10428 case BUILT_IN_VSNPRINTF_CHK:
10429 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10436 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10437 TREE_NO_WARNING (ret) = 1;
10443 /* A wrapper function for builtin folding that prevents warnings for
10444 "statement without effect" and the like, caused by removing the
10445 call node earlier than the warning is generated. */
10448 fold_call_expr (tree exp, bool ignore)
10450 tree ret = NULL_TREE;
10451 tree fndecl = get_callee_fndecl (exp);
10453 && TREE_CODE (fndecl) == FUNCTION_DECL
10454 && DECL_BUILT_IN (fndecl))
10456 /* FIXME: Don't use a list in this interface. */
10457 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10458 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10461 int nargs = call_expr_nargs (exp);
10462 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10464 tree *args = CALL_EXPR_ARGP (exp);
10465 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10468 ret = fold_builtin_varargs (fndecl, exp, ignore);
10471 /* Propagate location information from original call to
10472 expansion of builtin. Otherwise things like
10473 maybe_emit_chk_warning, that operate on the expansion
10474 of a builtin, will use the wrong location information. */
10475 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10477 tree realret = ret;
10478 if (TREE_CODE (ret) == NOP_EXPR)
10479 realret = TREE_OPERAND (ret, 0);
10480 if (CAN_HAVE_LOCATION_P (realret)
10481 && !EXPR_HAS_LOCATION (realret))
10482 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10491 /* Conveniently construct a function call expression. FNDECL names the
10492 function to be called and ARGLIST is a TREE_LIST of arguments. */
10495 build_function_call_expr (tree fndecl, tree arglist)
10497 tree fntype = TREE_TYPE (fndecl);
10498 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10499 int n = list_length (arglist);
10500 tree *argarray = (tree *) alloca (n * sizeof (tree));
10503 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10504 argarray[i] = TREE_VALUE (arglist);
10505 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10508 /* Conveniently construct a function call expression. FNDECL names the
10509 function to be called, N is the number of arguments, and the "..."
10510 parameters are the argument expressions. */
10513 build_call_expr (tree fndecl, int n, ...)
10516 tree fntype = TREE_TYPE (fndecl);
10517 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10518 tree *argarray = (tree *) alloca (n * sizeof (tree));
10522 for (i = 0; i < n; i++)
10523 argarray[i] = va_arg (ap, tree);
10525 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10528 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10529 N arguments are passed in the array ARGARRAY. */
10532 fold_builtin_call_array (tree type,
10537 tree ret = NULL_TREE;
10541 if (TREE_CODE (fn) == ADDR_EXPR)
10543 tree fndecl = TREE_OPERAND (fn, 0);
10544 if (TREE_CODE (fndecl) == FUNCTION_DECL
10545 && DECL_BUILT_IN (fndecl))
10547 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10549 tree arglist = NULL_TREE;
10550 for (i = n - 1; i >= 0; i--)
10551 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10552 ret = targetm.fold_builtin (fndecl, arglist, false);
10556 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10558 /* First try the transformations that don't require consing up
10560 ret = fold_builtin_n (fndecl, argarray, n, false);
10565 /* If we got this far, we need to build an exp. */
10566 exp = build_call_array (type, fn, n, argarray);
10567 ret = fold_builtin_varargs (fndecl, exp, false);
10568 return ret ? ret : exp;
10572 return build_call_array (type, fn, n, argarray);
10575 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10576 along with N new arguments specified as the "..." parameters. SKIP
10577 is the number of arguments in EXP to be omitted. This function is used
10578 to do varargs-to-varargs transformations. */
10581 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10583 int oldnargs = call_expr_nargs (exp);
10584 int nargs = oldnargs - skip + n;
10585 tree fntype = TREE_TYPE (fndecl);
10586 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10594 buffer = alloca (nargs * sizeof (tree));
10596 for (i = 0; i < n; i++)
10597 buffer[i] = va_arg (ap, tree);
10599 for (j = skip; j < oldnargs; j++, i++)
10600 buffer[i] = CALL_EXPR_ARG (exp, j);
10603 buffer = CALL_EXPR_ARGP (exp) + skip;
10605 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10608 /* Validate a single argument ARG against a tree code CODE representing
10612 validate_arg (const_tree arg, enum tree_code code)
10616 else if (code == POINTER_TYPE)
10617 return POINTER_TYPE_P (TREE_TYPE (arg));
10618 return code == TREE_CODE (TREE_TYPE (arg));
10621 /* This function validates the types of a function call argument list
10622 against a specified list of tree_codes. If the last specifier is a 0,
10623 that represents an ellipses, otherwise the last specifier must be a
10627 validate_arglist (const_tree callexpr, ...)
10629 enum tree_code code;
10632 const_call_expr_arg_iterator iter;
10635 va_start (ap, callexpr);
10636 init_const_call_expr_arg_iterator (callexpr, &iter);
10640 code = va_arg (ap, enum tree_code);
10644 /* This signifies an ellipses, any further arguments are all ok. */
10648 /* This signifies an endlink, if no arguments remain, return
10649 true, otherwise return false. */
10650 res = !more_const_call_expr_args_p (&iter);
10653 /* If no parameters remain or the parameter's code does not
10654 match the specified code, return false. Otherwise continue
10655 checking any remaining arguments. */
10656 arg = next_const_call_expr_arg (&iter);
10657 if (!validate_arg (arg, code))
10664 /* We need gotos here since we can only have one VA_CLOSE in a
10672 /* Default target-specific builtin expander that does nothing. */
10675 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10676 rtx target ATTRIBUTE_UNUSED,
10677 rtx subtarget ATTRIBUTE_UNUSED,
10678 enum machine_mode mode ATTRIBUTE_UNUSED,
10679 int ignore ATTRIBUTE_UNUSED)
10684 /* Returns true is EXP represents data that would potentially reside
10685 in a readonly section. */
10688 readonly_data_expr (tree exp)
10692 if (TREE_CODE (exp) != ADDR_EXPR)
10695 exp = get_base_address (TREE_OPERAND (exp, 0));
10699 /* Make sure we call decl_readonly_section only for trees it
10700 can handle (since it returns true for everything it doesn't
10702 if (TREE_CODE (exp) == STRING_CST
10703 || TREE_CODE (exp) == CONSTRUCTOR
10704 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10705 return decl_readonly_section (exp, 0);
10710 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10711 to the call, and TYPE is its return type.
10713 Return NULL_TREE if no simplification was possible, otherwise return the
10714 simplified form of the call as a tree.
10716 The simplified form may be a constant or other expression which
10717 computes the same value, but in a more efficient manner (including
10718 calls to other builtin functions).
10720 The call may contain arguments which need to be evaluated, but
10721 which are not useful to determine the result of the call. In
10722 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10723 COMPOUND_EXPR will be an argument which must be evaluated.
10724 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10725 COMPOUND_EXPR in the chain will contain the tree for the simplified
10726 form of the builtin function call. */
10729 fold_builtin_strstr (tree s1, tree s2, tree type)
10731 if (!validate_arg (s1, POINTER_TYPE)
10732 || !validate_arg (s2, POINTER_TYPE))
10737 const char *p1, *p2;
10739 p2 = c_getstr (s2);
10743 p1 = c_getstr (s1);
10746 const char *r = strstr (p1, p2);
10750 return build_int_cst (TREE_TYPE (s1), 0);
10752 /* Return an offset into the constant string argument. */
10753 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10754 s1, size_int (r - p1));
10755 return fold_convert (type, tem);
10758 /* The argument is const char *, and the result is char *, so we need
10759 a type conversion here to avoid a warning. */
10761 return fold_convert (type, s1);
10766 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10770 /* New argument list transforming strstr(s1, s2) to
10771 strchr(s1, s2[0]). */
10772 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10776 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10777 the call, and TYPE is its return type.
10779 Return NULL_TREE if no simplification was possible, otherwise return the
10780 simplified form of the call as a tree.
10782 The simplified form may be a constant or other expression which
10783 computes the same value, but in a more efficient manner (including
10784 calls to other builtin functions).
10786 The call may contain arguments which need to be evaluated, but
10787 which are not useful to determine the result of the call. In
10788 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10789 COMPOUND_EXPR will be an argument which must be evaluated.
10790 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10791 COMPOUND_EXPR in the chain will contain the tree for the simplified
10792 form of the builtin function call. */
10795 fold_builtin_strchr (tree s1, tree s2, tree type)
10797 if (!validate_arg (s1, POINTER_TYPE)
10798 || !validate_arg (s2, INTEGER_TYPE))
10804 if (TREE_CODE (s2) != INTEGER_CST)
10807 p1 = c_getstr (s1);
10814 if (target_char_cast (s2, &c))
10817 r = strchr (p1, c);
10820 return build_int_cst (TREE_TYPE (s1), 0);
10822 /* Return an offset into the constant string argument. */
10823 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10824 s1, size_int (r - p1));
10825 return fold_convert (type, tem);
10831 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10832 the call, and TYPE is its return type.
10834 Return NULL_TREE if no simplification was possible, otherwise return the
10835 simplified form of the call as a tree.
10837 The simplified form may be a constant or other expression which
10838 computes the same value, but in a more efficient manner (including
10839 calls to other builtin functions).
10841 The call may contain arguments which need to be evaluated, but
10842 which are not useful to determine the result of the call. In
10843 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10844 COMPOUND_EXPR will be an argument which must be evaluated.
10845 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10846 COMPOUND_EXPR in the chain will contain the tree for the simplified
10847 form of the builtin function call. */
10850 fold_builtin_strrchr (tree s1, tree s2, tree type)
10852 if (!validate_arg (s1, POINTER_TYPE)
10853 || !validate_arg (s2, INTEGER_TYPE))
10860 if (TREE_CODE (s2) != INTEGER_CST)
10863 p1 = c_getstr (s1);
10870 if (target_char_cast (s2, &c))
10873 r = strrchr (p1, c);
10876 return build_int_cst (TREE_TYPE (s1), 0);
10878 /* Return an offset into the constant string argument. */
10879 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10880 s1, size_int (r - p1));
10881 return fold_convert (type, tem);
10884 if (! integer_zerop (s2))
10887 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10891 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10892 return build_call_expr (fn, 2, s1, s2);
10896 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10897 to the call, and TYPE is its return type.
10899 Return NULL_TREE if no simplification was possible, otherwise return the
10900 simplified form of the call as a tree.
10902 The simplified form may be a constant or other expression which
10903 computes the same value, but in a more efficient manner (including
10904 calls to other builtin functions).
10906 The call may contain arguments which need to be evaluated, but
10907 which are not useful to determine the result of the call. In
10908 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10909 COMPOUND_EXPR will be an argument which must be evaluated.
10910 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10911 COMPOUND_EXPR in the chain will contain the tree for the simplified
10912 form of the builtin function call. */
10915 fold_builtin_strpbrk (tree s1, tree s2, tree type)
10917 if (!validate_arg (s1, POINTER_TYPE)
10918 || !validate_arg (s2, POINTER_TYPE))
10923 const char *p1, *p2;
10925 p2 = c_getstr (s2);
10929 p1 = c_getstr (s1);
10932 const char *r = strpbrk (p1, p2);
10936 return build_int_cst (TREE_TYPE (s1), 0);
10938 /* Return an offset into the constant string argument. */
10939 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10940 s1, size_int (r - p1));
10941 return fold_convert (type, tem);
10945 /* strpbrk(x, "") == NULL.
10946 Evaluate and ignore s1 in case it had side-effects. */
10947 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
10950 return NULL_TREE; /* Really call strpbrk. */
10952 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10956 /* New argument list transforming strpbrk(s1, s2) to
10957 strchr(s1, s2[0]). */
10958 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10962 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10965 Return NULL_TREE if no simplification was possible, otherwise return the
10966 simplified form of the call as a tree.
10968 The simplified form may be a constant or other expression which
10969 computes the same value, but in a more efficient manner (including
10970 calls to other builtin functions).
10972 The call may contain arguments which need to be evaluated, but
10973 which are not useful to determine the result of the call. In
10974 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10975 COMPOUND_EXPR will be an argument which must be evaluated.
10976 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10977 COMPOUND_EXPR in the chain will contain the tree for the simplified
10978 form of the builtin function call. */
10981 fold_builtin_strcat (tree dst, tree src)
10983 if (!validate_arg (dst, POINTER_TYPE)
10984 || !validate_arg (src, POINTER_TYPE))
10988 const char *p = c_getstr (src);
10990 /* If the string length is zero, return the dst parameter. */
10991 if (p && *p == '\0')
10998 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10999 arguments to the call.
11001 Return NULL_TREE if no simplification was possible, otherwise return the
11002 simplified form of the call as a tree.
11004 The simplified form may be a constant or other expression which
11005 computes the same value, but in a more efficient manner (including
11006 calls to other builtin functions).
11008 The call may contain arguments which need to be evaluated, but
11009 which are not useful to determine the result of the call. In
11010 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11011 COMPOUND_EXPR will be an argument which must be evaluated.
11012 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11013 COMPOUND_EXPR in the chain will contain the tree for the simplified
11014 form of the builtin function call. */
11017 fold_builtin_strncat (tree dst, tree src, tree len)
11019 if (!validate_arg (dst, POINTER_TYPE)
11020 || !validate_arg (src, POINTER_TYPE)
11021 || !validate_arg (len, INTEGER_TYPE))
11025 const char *p = c_getstr (src);
11027 /* If the requested length is zero, or the src parameter string
11028 length is zero, return the dst parameter. */
11029 if (integer_zerop (len) || (p && *p == '\0'))
11030 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11032 /* If the requested len is greater than or equal to the string
11033 length, call strcat. */
11034 if (TREE_CODE (len) == INTEGER_CST && p
11035 && compare_tree_int (len, strlen (p)) >= 0)
11037 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11039 /* If the replacement _DECL isn't initialized, don't do the
11044 return build_call_expr (fn, 2, dst, src);
11050 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11053 Return NULL_TREE if no simplification was possible, otherwise return the
11054 simplified form of the call as a tree.
11056 The simplified form may be a constant or other expression which
11057 computes the same value, but in a more efficient manner (including
11058 calls to other builtin functions).
11060 The call may contain arguments which need to be evaluated, but
11061 which are not useful to determine the result of the call. In
11062 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11063 COMPOUND_EXPR will be an argument which must be evaluated.
11064 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11065 COMPOUND_EXPR in the chain will contain the tree for the simplified
11066 form of the builtin function call. */
11069 fold_builtin_strspn (tree s1, tree s2)
11071 if (!validate_arg (s1, POINTER_TYPE)
11072 || !validate_arg (s2, POINTER_TYPE))
11076 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11078 /* If both arguments are constants, evaluate at compile-time. */
11081 const size_t r = strspn (p1, p2);
11082 return size_int (r);
11085 /* If either argument is "", return NULL_TREE. */
11086 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11087 /* Evaluate and ignore both arguments in case either one has
11089 return omit_two_operands (integer_type_node, integer_zero_node,
11095 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11098 Return NULL_TREE if no simplification was possible, otherwise return the
11099 simplified form of the call as a tree.
11101 The simplified form may be a constant or other expression which
11102 computes the same value, but in a more efficient manner (including
11103 calls to other builtin functions).
11105 The call may contain arguments which need to be evaluated, but
11106 which are not useful to determine the result of the call. In
11107 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11108 COMPOUND_EXPR will be an argument which must be evaluated.
11109 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11110 COMPOUND_EXPR in the chain will contain the tree for the simplified
11111 form of the builtin function call. */
11114 fold_builtin_strcspn (tree s1, tree s2)
11116 if (!validate_arg (s1, POINTER_TYPE)
11117 || !validate_arg (s2, POINTER_TYPE))
11121 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11123 /* If both arguments are constants, evaluate at compile-time. */
11126 const size_t r = strcspn (p1, p2);
11127 return size_int (r);
11130 /* If the first argument is "", return NULL_TREE. */
11131 if (p1 && *p1 == '\0')
11133 /* Evaluate and ignore argument s2 in case it has
11135 return omit_one_operand (integer_type_node,
11136 integer_zero_node, s2);
11139 /* If the second argument is "", return __builtin_strlen(s1). */
11140 if (p2 && *p2 == '\0')
11142 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11144 /* If the replacement _DECL isn't initialized, don't do the
11149 return build_call_expr (fn, 1, s1);
11155 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11156 to the call. IGNORE is true if the value returned
11157 by the builtin will be ignored. UNLOCKED is true is true if this
11158 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11159 the known length of the string. Return NULL_TREE if no simplification
11163 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11165 /* If we're using an unlocked function, assume the other unlocked
11166 functions exist explicitly. */
11167 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11168 : implicit_built_in_decls[BUILT_IN_FPUTC];
11169 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11170 : implicit_built_in_decls[BUILT_IN_FWRITE];
11172 /* If the return value is used, don't do the transformation. */
11176 /* Verify the arguments in the original call. */
11177 if (!validate_arg (arg0, POINTER_TYPE)
11178 || !validate_arg (arg1, POINTER_TYPE))
11182 len = c_strlen (arg0, 0);
11184 /* Get the length of the string passed to fputs. If the length
11185 can't be determined, punt. */
11187 || TREE_CODE (len) != INTEGER_CST)
11190 switch (compare_tree_int (len, 1))
11192 case -1: /* length is 0, delete the call entirely . */
11193 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11195 case 0: /* length is 1, call fputc. */
11197 const char *p = c_getstr (arg0);
11202 return build_call_expr (fn_fputc, 2,
11203 build_int_cst (NULL_TREE, p[0]), arg1);
11209 case 1: /* length is greater than 1, call fwrite. */
11211 /* If optimizing for size keep fputs. */
11214 /* New argument list transforming fputs(string, stream) to
11215 fwrite(string, 1, len, stream). */
11217 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11222 gcc_unreachable ();
11227 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11228 produced. False otherwise. This is done so that we don't output the error
11229 or warning twice or three times. */
11231 fold_builtin_next_arg (tree exp, bool va_start_p)
11233 tree fntype = TREE_TYPE (current_function_decl);
11234 int nargs = call_expr_nargs (exp);
11237 if (TYPE_ARG_TYPES (fntype) == 0
11238 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11239 == void_type_node))
11241 error ("%<va_start%> used in function with fixed args");
11247 if (va_start_p && (nargs != 2))
11249 error ("wrong number of arguments to function %<va_start%>");
11252 arg = CALL_EXPR_ARG (exp, 1);
11254 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11255 when we checked the arguments and if needed issued a warning. */
11260 /* Evidently an out of date version of <stdarg.h>; can't validate
11261 va_start's second argument, but can still work as intended. */
11262 warning (0, "%<__builtin_next_arg%> called without an argument");
11265 else if (nargs > 1)
11267 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11270 arg = CALL_EXPR_ARG (exp, 0);
11273 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11274 or __builtin_next_arg (0) the first time we see it, after checking
11275 the arguments and if needed issuing a warning. */
11276 if (!integer_zerop (arg))
11278 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11280 /* Strip off all nops for the sake of the comparison. This
11281 is not quite the same as STRIP_NOPS. It does more.
11282 We must also strip off INDIRECT_EXPR for C++ reference
11284 while (TREE_CODE (arg) == NOP_EXPR
11285 || TREE_CODE (arg) == CONVERT_EXPR
11286 || TREE_CODE (arg) == NON_LVALUE_EXPR
11287 || TREE_CODE (arg) == INDIRECT_REF)
11288 arg = TREE_OPERAND (arg, 0);
11289 if (arg != last_parm)
11291 /* FIXME: Sometimes with the tree optimizers we can get the
11292 not the last argument even though the user used the last
11293 argument. We just warn and set the arg to be the last
11294 argument so that we will get wrong-code because of
11296 warning (0, "second parameter of %<va_start%> not last named argument");
11298 /* We want to verify the second parameter just once before the tree
11299 optimizers are run and then avoid keeping it in the tree,
11300 as otherwise we could warn even for correct code like:
11301 void foo (int i, ...)
11302 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11304 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11306 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11312 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11313 ORIG may be null if this is a 2-argument call. We don't attempt to
11314 simplify calls with more than 3 arguments.
11316 Return NULL_TREE if no simplification was possible, otherwise return the
11317 simplified form of the call as a tree. If IGNORED is true, it means that
11318 the caller does not use the returned value of the function. */
11321 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11324 const char *fmt_str = NULL;
11326 /* Verify the required arguments in the original call. We deal with two
11327 types of sprintf() calls: 'sprintf (str, fmt)' and
11328 'sprintf (dest, "%s", orig)'. */
11329 if (!validate_arg (dest, POINTER_TYPE)
11330 || !validate_arg (fmt, POINTER_TYPE))
11332 if (orig && !validate_arg (orig, POINTER_TYPE))
11335 /* Check whether the format is a literal string constant. */
11336 fmt_str = c_getstr (fmt);
11337 if (fmt_str == NULL)
11341 retval = NULL_TREE;
11343 if (!init_target_chars ())
11346 /* If the format doesn't contain % args or %%, use strcpy. */
11347 if (strchr (fmt_str, target_percent) == NULL)
11349 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11354 /* Don't optimize sprintf (buf, "abc", ptr++). */
11358 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11359 'format' is known to contain no % formats. */
11360 call = build_call_expr (fn, 2, dest, fmt);
11362 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11365 /* If the format is "%s", use strcpy if the result isn't used. */
11366 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11369 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11374 /* Don't crash on sprintf (str1, "%s"). */
11378 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11381 retval = c_strlen (orig, 1);
11382 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11385 call = build_call_expr (fn, 2, dest, orig);
11388 if (call && retval)
11390 retval = fold_convert
11391 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11393 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11399 /* Expand a call EXP to __builtin_object_size. */
11402 expand_builtin_object_size (tree exp)
11405 int object_size_type;
11406 tree fndecl = get_callee_fndecl (exp);
11407 location_t locus = EXPR_LOCATION (exp);
11409 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11411 error ("%Hfirst argument of %D must be a pointer, second integer constant",
11413 expand_builtin_trap ();
11417 ost = CALL_EXPR_ARG (exp, 1);
11420 if (TREE_CODE (ost) != INTEGER_CST
11421 || tree_int_cst_sgn (ost) < 0
11422 || compare_tree_int (ost, 3) > 0)
11424 error ("%Hlast argument of %D is not integer constant between 0 and 3",
11426 expand_builtin_trap ();
11430 object_size_type = tree_low_cst (ost, 0);
11432 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11435 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11436 FCODE is the BUILT_IN_* to use.
11437 Return NULL_RTX if we failed; the caller should emit a normal call,
11438 otherwise try to get the result in TARGET, if convenient (and in
11439 mode MODE if that's convenient). */
11442 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11443 enum built_in_function fcode)
11445 tree dest, src, len, size;
11447 if (!validate_arglist (exp,
11449 fcode == BUILT_IN_MEMSET_CHK
11450 ? INTEGER_TYPE : POINTER_TYPE,
11451 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11454 dest = CALL_EXPR_ARG (exp, 0);
11455 src = CALL_EXPR_ARG (exp, 1);
11456 len = CALL_EXPR_ARG (exp, 2);
11457 size = CALL_EXPR_ARG (exp, 3);
11459 if (! host_integerp (size, 1))
11462 if (host_integerp (len, 1) || integer_all_onesp (size))
11466 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11468 location_t locus = EXPR_LOCATION (exp);
11469 warning (0, "%Hcall to %D will always overflow destination buffer",
11470 &locus, get_callee_fndecl (exp));
11475 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11476 mem{cpy,pcpy,move,set} is available. */
11479 case BUILT_IN_MEMCPY_CHK:
11480 fn = built_in_decls[BUILT_IN_MEMCPY];
11482 case BUILT_IN_MEMPCPY_CHK:
11483 fn = built_in_decls[BUILT_IN_MEMPCPY];
11485 case BUILT_IN_MEMMOVE_CHK:
11486 fn = built_in_decls[BUILT_IN_MEMMOVE];
11488 case BUILT_IN_MEMSET_CHK:
11489 fn = built_in_decls[BUILT_IN_MEMSET];
11498 fn = build_call_expr (fn, 3, dest, src, len);
11499 if (TREE_CODE (fn) == CALL_EXPR)
11500 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11501 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11503 else if (fcode == BUILT_IN_MEMSET_CHK)
11507 unsigned int dest_align
11508 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11510 /* If DEST is not a pointer type, call the normal function. */
11511 if (dest_align == 0)
11514 /* If SRC and DEST are the same (and not volatile), do nothing. */
11515 if (operand_equal_p (src, dest, 0))
11519 if (fcode != BUILT_IN_MEMPCPY_CHK)
11521 /* Evaluate and ignore LEN in case it has side-effects. */
11522 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11523 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11526 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11527 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11530 /* __memmove_chk special case. */
11531 if (fcode == BUILT_IN_MEMMOVE_CHK)
11533 unsigned int src_align
11534 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11536 if (src_align == 0)
11539 /* If src is categorized for a readonly section we can use
11540 normal __memcpy_chk. */
11541 if (readonly_data_expr (src))
11543 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11546 fn = build_call_expr (fn, 4, dest, src, len, size);
11547 if (TREE_CODE (fn) == CALL_EXPR)
11548 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11549 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11556 /* Emit warning if a buffer overflow is detected at compile time. */
11559 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11567 case BUILT_IN_STRCPY_CHK:
11568 case BUILT_IN_STPCPY_CHK:
11569 /* For __strcat_chk the warning will be emitted only if overflowing
11570 by at least strlen (dest) + 1 bytes. */
11571 case BUILT_IN_STRCAT_CHK:
11572 len = CALL_EXPR_ARG (exp, 1);
11573 size = CALL_EXPR_ARG (exp, 2);
11576 case BUILT_IN_STRNCAT_CHK:
11577 case BUILT_IN_STRNCPY_CHK:
11578 len = CALL_EXPR_ARG (exp, 2);
11579 size = CALL_EXPR_ARG (exp, 3);
11581 case BUILT_IN_SNPRINTF_CHK:
11582 case BUILT_IN_VSNPRINTF_CHK:
11583 len = CALL_EXPR_ARG (exp, 1);
11584 size = CALL_EXPR_ARG (exp, 3);
11587 gcc_unreachable ();
11593 if (! host_integerp (size, 1) || integer_all_onesp (size))
11598 len = c_strlen (len, 1);
11599 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11602 else if (fcode == BUILT_IN_STRNCAT_CHK)
11604 tree src = CALL_EXPR_ARG (exp, 1);
11605 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11607 src = c_strlen (src, 1);
11608 if (! src || ! host_integerp (src, 1))
11610 locus = EXPR_LOCATION (exp);
11611 warning (0, "%Hcall to %D might overflow destination buffer",
11612 &locus, get_callee_fndecl (exp));
11615 else if (tree_int_cst_lt (src, size))
11618 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11621 locus = EXPR_LOCATION (exp);
11622 warning (0, "%Hcall to %D will always overflow destination buffer",
11623 &locus, get_callee_fndecl (exp));
11626 /* Emit warning if a buffer overflow is detected at compile time
11627 in __sprintf_chk/__vsprintf_chk calls. */
11630 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11632 tree dest, size, len, fmt, flag;
11633 const char *fmt_str;
11634 int nargs = call_expr_nargs (exp);
11636 /* Verify the required arguments in the original call. */
11640 dest = CALL_EXPR_ARG (exp, 0);
11641 flag = CALL_EXPR_ARG (exp, 1);
11642 size = CALL_EXPR_ARG (exp, 2);
11643 fmt = CALL_EXPR_ARG (exp, 3);
11645 if (! host_integerp (size, 1) || integer_all_onesp (size))
11648 /* Check whether the format is a literal string constant. */
11649 fmt_str = c_getstr (fmt);
11650 if (fmt_str == NULL)
11653 if (!init_target_chars ())
11656 /* If the format doesn't contain % args or %%, we know its size. */
11657 if (strchr (fmt_str, target_percent) == 0)
11658 len = build_int_cstu (size_type_node, strlen (fmt_str));
11659 /* If the format is "%s" and first ... argument is a string literal,
11661 else if (fcode == BUILT_IN_SPRINTF_CHK
11662 && strcmp (fmt_str, target_percent_s) == 0)
11668 arg = CALL_EXPR_ARG (exp, 4);
11669 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11672 len = c_strlen (arg, 1);
11673 if (!len || ! host_integerp (len, 1))
11679 if (! tree_int_cst_lt (len, size))
11681 location_t locus = EXPR_LOCATION (exp);
11682 warning (0, "%Hcall to %D will always overflow destination buffer",
11683 &locus, get_callee_fndecl (exp));
11687 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11691 fold_builtin_object_size (tree ptr, tree ost)
11693 tree ret = NULL_TREE;
11694 int object_size_type;
11696 if (!validate_arg (ptr, POINTER_TYPE)
11697 || !validate_arg (ost, INTEGER_TYPE))
11702 if (TREE_CODE (ost) != INTEGER_CST
11703 || tree_int_cst_sgn (ost) < 0
11704 || compare_tree_int (ost, 3) > 0)
11707 object_size_type = tree_low_cst (ost, 0);
11709 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11710 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11711 and (size_t) 0 for types 2 and 3. */
11712 if (TREE_SIDE_EFFECTS (ptr))
11713 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11715 if (TREE_CODE (ptr) == ADDR_EXPR)
11716 ret = build_int_cstu (size_type_node,
11717 compute_builtin_object_size (ptr, object_size_type));
11719 else if (TREE_CODE (ptr) == SSA_NAME)
11721 unsigned HOST_WIDE_INT bytes;
11723 /* If object size is not known yet, delay folding until
11724 later. Maybe subsequent passes will help determining
11726 bytes = compute_builtin_object_size (ptr, object_size_type);
11727 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11729 ret = build_int_cstu (size_type_node, bytes);
11734 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11735 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11736 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11743 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11744 DEST, SRC, LEN, and SIZE are the arguments to the call.
11745 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11746 code of the builtin. If MAXLEN is not NULL, it is maximum length
11747 passed as third argument. */
11750 fold_builtin_memory_chk (tree fndecl,
11751 tree dest, tree src, tree len, tree size,
11752 tree maxlen, bool ignore,
11753 enum built_in_function fcode)
11757 if (!validate_arg (dest, POINTER_TYPE)
11758 || !validate_arg (src,
11759 (fcode == BUILT_IN_MEMSET_CHK
11760 ? INTEGER_TYPE : POINTER_TYPE))
11761 || !validate_arg (len, INTEGER_TYPE)
11762 || !validate_arg (size, INTEGER_TYPE))
11765 /* If SRC and DEST are the same (and not volatile), return DEST
11766 (resp. DEST+LEN for __mempcpy_chk). */
11767 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11769 if (fcode != BUILT_IN_MEMPCPY_CHK)
11770 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11773 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11774 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11778 if (! host_integerp (size, 1))
11781 if (! integer_all_onesp (size))
11783 if (! host_integerp (len, 1))
11785 /* If LEN is not constant, try MAXLEN too.
11786 For MAXLEN only allow optimizing into non-_ocs function
11787 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11788 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11790 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11792 /* (void) __mempcpy_chk () can be optimized into
11793 (void) __memcpy_chk (). */
11794 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11798 return build_call_expr (fn, 4, dest, src, len, size);
11806 if (tree_int_cst_lt (size, maxlen))
11811 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11812 mem{cpy,pcpy,move,set} is available. */
11815 case BUILT_IN_MEMCPY_CHK:
11816 fn = built_in_decls[BUILT_IN_MEMCPY];
11818 case BUILT_IN_MEMPCPY_CHK:
11819 fn = built_in_decls[BUILT_IN_MEMPCPY];
11821 case BUILT_IN_MEMMOVE_CHK:
11822 fn = built_in_decls[BUILT_IN_MEMMOVE];
11824 case BUILT_IN_MEMSET_CHK:
11825 fn = built_in_decls[BUILT_IN_MEMSET];
11834 return build_call_expr (fn, 3, dest, src, len);
11837 /* Fold a call to the __st[rp]cpy_chk builtin.
11838 DEST, SRC, and SIZE are the arguments to the call.
11839 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
11840 code of the builtin. If MAXLEN is not NULL, it is maximum length of
11841 strings passed as second argument. */
11844 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
11845 tree maxlen, bool ignore,
11846 enum built_in_function fcode)
11850 if (!validate_arg (dest, POINTER_TYPE)
11851 || !validate_arg (src, POINTER_TYPE)
11852 || !validate_arg (size, INTEGER_TYPE))
11855 /* If SRC and DEST are the same (and not volatile), return DEST. */
11856 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
11857 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
11859 if (! host_integerp (size, 1))
11862 if (! integer_all_onesp (size))
11864 len = c_strlen (src, 1);
11865 if (! len || ! host_integerp (len, 1))
11867 /* If LEN is not constant, try MAXLEN too.
11868 For MAXLEN only allow optimizing into non-_ocs function
11869 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11870 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11872 if (fcode == BUILT_IN_STPCPY_CHK)
11877 /* If return value of __stpcpy_chk is ignored,
11878 optimize into __strcpy_chk. */
11879 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
11883 return build_call_expr (fn, 3, dest, src, size);
11886 if (! len || TREE_SIDE_EFFECTS (len))
11889 /* If c_strlen returned something, but not a constant,
11890 transform __strcpy_chk into __memcpy_chk. */
11891 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11895 len = size_binop (PLUS_EXPR, len, ssize_int (1));
11896 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
11897 build_call_expr (fn, 4,
11898 dest, src, len, size));
11904 if (! tree_int_cst_lt (maxlen, size))
11908 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
11909 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
11910 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
11914 return build_call_expr (fn, 2, dest, src);
11917 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
11918 are the arguments to the call. If MAXLEN is not NULL, it is maximum
11919 length passed as third argument. */
11922 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
11927 if (!validate_arg (dest, POINTER_TYPE)
11928 || !validate_arg (src, POINTER_TYPE)
11929 || !validate_arg (len, INTEGER_TYPE)
11930 || !validate_arg (size, INTEGER_TYPE))
11933 if (! host_integerp (size, 1))
11936 if (! integer_all_onesp (size))
11938 if (! host_integerp (len, 1))
11940 /* If LEN is not constant, try MAXLEN too.
11941 For MAXLEN only allow optimizing into non-_ocs function
11942 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11943 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11949 if (tree_int_cst_lt (size, maxlen))
11953 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
11954 fn = built_in_decls[BUILT_IN_STRNCPY];
11958 return build_call_expr (fn, 3, dest, src, len);
11961 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11962 are the arguments to the call. */
11965 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
11970 if (!validate_arg (dest, POINTER_TYPE)
11971 || !validate_arg (src, POINTER_TYPE)
11972 || !validate_arg (size, INTEGER_TYPE))
11975 p = c_getstr (src);
11976 /* If the SRC parameter is "", return DEST. */
11977 if (p && *p == '\0')
11978 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11980 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
11983 /* If __builtin_strcat_chk is used, assume strcat is available. */
11984 fn = built_in_decls[BUILT_IN_STRCAT];
11988 return build_call_expr (fn, 2, dest, src);
11991 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11995 fold_builtin_strncat_chk (tree fndecl,
11996 tree dest, tree src, tree len, tree size)
12001 if (!validate_arg (dest, POINTER_TYPE)
12002 || !validate_arg (src, POINTER_TYPE)
12003 || !validate_arg (size, INTEGER_TYPE)
12004 || !validate_arg (size, INTEGER_TYPE))
12007 p = c_getstr (src);
12008 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12009 if (p && *p == '\0')
12010 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12011 else if (integer_zerop (len))
12012 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12014 if (! host_integerp (size, 1))
12017 if (! integer_all_onesp (size))
12019 tree src_len = c_strlen (src, 1);
12021 && host_integerp (src_len, 1)
12022 && host_integerp (len, 1)
12023 && ! tree_int_cst_lt (len, src_len))
12025 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12026 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12030 return build_call_expr (fn, 3, dest, src, size);
12035 /* If __builtin_strncat_chk is used, assume strncat is available. */
12036 fn = built_in_decls[BUILT_IN_STRNCAT];
12040 return build_call_expr (fn, 3, dest, src, len);
12043 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12044 a normal call should be emitted rather than expanding the function
12045 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12048 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12050 tree dest, size, len, fn, fmt, flag;
12051 const char *fmt_str;
12052 int nargs = call_expr_nargs (exp);
12054 /* Verify the required arguments in the original call. */
12057 dest = CALL_EXPR_ARG (exp, 0);
12058 if (!validate_arg (dest, POINTER_TYPE))
12060 flag = CALL_EXPR_ARG (exp, 1);
12061 if (!validate_arg (flag, INTEGER_TYPE))
12063 size = CALL_EXPR_ARG (exp, 2);
12064 if (!validate_arg (size, INTEGER_TYPE))
12066 fmt = CALL_EXPR_ARG (exp, 3);
12067 if (!validate_arg (fmt, POINTER_TYPE))
12070 if (! host_integerp (size, 1))
12075 if (!init_target_chars ())
12078 /* Check whether the format is a literal string constant. */
12079 fmt_str = c_getstr (fmt);
12080 if (fmt_str != NULL)
12082 /* If the format doesn't contain % args or %%, we know the size. */
12083 if (strchr (fmt_str, target_percent) == 0)
12085 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12086 len = build_int_cstu (size_type_node, strlen (fmt_str));
12088 /* If the format is "%s" and first ... argument is a string literal,
12089 we know the size too. */
12090 else if (fcode == BUILT_IN_SPRINTF_CHK
12091 && strcmp (fmt_str, target_percent_s) == 0)
12097 arg = CALL_EXPR_ARG (exp, 4);
12098 if (validate_arg (arg, POINTER_TYPE))
12100 len = c_strlen (arg, 1);
12101 if (! len || ! host_integerp (len, 1))
12108 if (! integer_all_onesp (size))
12110 if (! len || ! tree_int_cst_lt (len, size))
12114 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12115 or if format doesn't contain % chars or is "%s". */
12116 if (! integer_zerop (flag))
12118 if (fmt_str == NULL)
12120 if (strchr (fmt_str, target_percent) != NULL
12121 && strcmp (fmt_str, target_percent_s))
12125 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12126 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12127 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12131 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12134 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12135 a normal call should be emitted rather than expanding the function
12136 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12137 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12138 passed as second argument. */
12141 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12142 enum built_in_function fcode)
12144 tree dest, size, len, fn, fmt, flag;
12145 const char *fmt_str;
12147 /* Verify the required arguments in the original call. */
12148 if (call_expr_nargs (exp) < 5)
12150 dest = CALL_EXPR_ARG (exp, 0);
12151 if (!validate_arg (dest, POINTER_TYPE))
12153 len = CALL_EXPR_ARG (exp, 1);
12154 if (!validate_arg (len, INTEGER_TYPE))
12156 flag = CALL_EXPR_ARG (exp, 2);
12157 if (!validate_arg (flag, INTEGER_TYPE))
12159 size = CALL_EXPR_ARG (exp, 3);
12160 if (!validate_arg (size, INTEGER_TYPE))
12162 fmt = CALL_EXPR_ARG (exp, 4);
12163 if (!validate_arg (fmt, POINTER_TYPE))
12166 if (! host_integerp (size, 1))
12169 if (! integer_all_onesp (size))
12171 if (! host_integerp (len, 1))
12173 /* If LEN is not constant, try MAXLEN too.
12174 For MAXLEN only allow optimizing into non-_ocs function
12175 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12176 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12182 if (tree_int_cst_lt (size, maxlen))
12186 if (!init_target_chars ())
12189 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12190 or if format doesn't contain % chars or is "%s". */
12191 if (! integer_zerop (flag))
12193 fmt_str = c_getstr (fmt);
12194 if (fmt_str == NULL)
12196 if (strchr (fmt_str, target_percent) != NULL
12197 && strcmp (fmt_str, target_percent_s))
12201 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12203 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12204 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12208 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12211 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12212 FMT and ARG are the arguments to the call; we don't fold cases with
12213 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12215 Return NULL_TREE if no simplification was possible, otherwise return the
12216 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12217 code of the function to be simplified. */
12220 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12221 enum built_in_function fcode)
12223 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12224 const char *fmt_str = NULL;
12226 /* If the return value is used, don't do the transformation. */
12230 /* Verify the required arguments in the original call. */
12231 if (!validate_arg (fmt, POINTER_TYPE))
12234 /* Check whether the format is a literal string constant. */
12235 fmt_str = c_getstr (fmt);
12236 if (fmt_str == NULL)
12239 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12241 /* If we're using an unlocked function, assume the other
12242 unlocked functions exist explicitly. */
12243 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12244 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12248 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12249 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12252 if (!init_target_chars ())
12255 if (strcmp (fmt_str, target_percent_s) == 0
12256 || strchr (fmt_str, target_percent) == NULL)
12260 if (strcmp (fmt_str, target_percent_s) == 0)
12262 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12265 if (!arg || !validate_arg (arg, POINTER_TYPE))
12268 str = c_getstr (arg);
12274 /* The format specifier doesn't contain any '%' characters. */
12275 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12281 /* If the string was "", printf does nothing. */
12282 if (str[0] == '\0')
12283 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12285 /* If the string has length of 1, call putchar. */
12286 if (str[1] == '\0')
12288 /* Given printf("c"), (where c is any one character,)
12289 convert "c"[0] to an int and pass that to the replacement
12291 newarg = build_int_cst (NULL_TREE, str[0]);
12293 call = build_call_expr (fn_putchar, 1, newarg);
12297 /* If the string was "string\n", call puts("string"). */
12298 size_t len = strlen (str);
12299 if ((unsigned char)str[len - 1] == target_newline)
12301 /* Create a NUL-terminated string that's one char shorter
12302 than the original, stripping off the trailing '\n'. */
12303 char *newstr = alloca (len);
12304 memcpy (newstr, str, len - 1);
12305 newstr[len - 1] = 0;
12307 newarg = build_string_literal (len, newstr);
12309 call = build_call_expr (fn_puts, 1, newarg);
12312 /* We'd like to arrange to call fputs(string,stdout) here,
12313 but we need stdout and don't have a way to get it yet. */
12318 /* The other optimizations can be done only on the non-va_list variants. */
12319 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12322 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12323 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12325 if (!arg || !validate_arg (arg, POINTER_TYPE))
12328 call = build_call_expr (fn_puts, 1, arg);
12331 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12332 else if (strcmp (fmt_str, target_percent_c) == 0)
12334 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12337 call = build_call_expr (fn_putchar, 1, arg);
12343 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12346 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12347 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12348 more than 3 arguments, and ARG may be null in the 2-argument case.
12350 Return NULL_TREE if no simplification was possible, otherwise return the
12351 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12352 code of the function to be simplified. */
12355 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12356 enum built_in_function fcode)
12358 tree fn_fputc, fn_fputs, call = NULL_TREE;
12359 const char *fmt_str = NULL;
12361 /* If the return value is used, don't do the transformation. */
12365 /* Verify the required arguments in the original call. */
12366 if (!validate_arg (fp, POINTER_TYPE))
12368 if (!validate_arg (fmt, POINTER_TYPE))
12371 /* Check whether the format is a literal string constant. */
12372 fmt_str = c_getstr (fmt);
12373 if (fmt_str == NULL)
12376 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12378 /* If we're using an unlocked function, assume the other
12379 unlocked functions exist explicitly. */
12380 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12381 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12385 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12386 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12389 if (!init_target_chars ())
12392 /* If the format doesn't contain % args or %%, use strcpy. */
12393 if (strchr (fmt_str, target_percent) == NULL)
12395 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12399 /* If the format specifier was "", fprintf does nothing. */
12400 if (fmt_str[0] == '\0')
12402 /* If FP has side-effects, just wait until gimplification is
12404 if (TREE_SIDE_EFFECTS (fp))
12407 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12410 /* When "string" doesn't contain %, replace all cases of
12411 fprintf (fp, string) with fputs (string, fp). The fputs
12412 builtin will take care of special cases like length == 1. */
12414 call = build_call_expr (fn_fputs, 2, fmt, fp);
12417 /* The other optimizations can be done only on the non-va_list variants. */
12418 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12421 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12422 else if (strcmp (fmt_str, target_percent_s) == 0)
12424 if (!arg || !validate_arg (arg, POINTER_TYPE))
12427 call = build_call_expr (fn_fputs, 2, arg, fp);
12430 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12431 else if (strcmp (fmt_str, target_percent_c) == 0)
12433 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12436 call = build_call_expr (fn_fputc, 2, arg, fp);
12441 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12444 /* Initialize format string characters in the target charset. */
12447 init_target_chars (void)
12452 target_newline = lang_hooks.to_target_charset ('\n');
12453 target_percent = lang_hooks.to_target_charset ('%');
12454 target_c = lang_hooks.to_target_charset ('c');
12455 target_s = lang_hooks.to_target_charset ('s');
12456 if (target_newline == 0 || target_percent == 0 || target_c == 0
12460 target_percent_c[0] = target_percent;
12461 target_percent_c[1] = target_c;
12462 target_percent_c[2] = '\0';
12464 target_percent_s[0] = target_percent;
12465 target_percent_s[1] = target_s;
12466 target_percent_s[2] = '\0';
12468 target_percent_s_newline[0] = target_percent;
12469 target_percent_s_newline[1] = target_s;
12470 target_percent_s_newline[2] = target_newline;
12471 target_percent_s_newline[3] = '\0';
12478 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12479 and no overflow/underflow occurred. INEXACT is true if M was not
12480 exactly calculated. TYPE is the tree type for the result. This
12481 function assumes that you cleared the MPFR flags and then
12482 calculated M to see if anything subsequently set a flag prior to
12483 entering this function. Return NULL_TREE if any checks fail. */
12486 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12488 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12489 overflow/underflow occurred. If -frounding-math, proceed iff the
12490 result of calling FUNC was exact. */
12491 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12492 && (!flag_rounding_math || !inexact))
12494 REAL_VALUE_TYPE rr;
12496 real_from_mpfr (&rr, m, type, GMP_RNDN);
12497 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12498 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12499 but the mpft_t is not, then we underflowed in the
12501 if (real_isfinite (&rr)
12502 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12504 REAL_VALUE_TYPE rmode;
12506 real_convert (&rmode, TYPE_MODE (type), &rr);
12507 /* Proceed iff the specified mode can hold the value. */
12508 if (real_identical (&rmode, &rr))
12509 return build_real (type, rmode);
12515 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12516 FUNC on it and return the resulting value as a tree with type TYPE.
12517 If MIN and/or MAX are not NULL, then the supplied ARG must be
12518 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12519 acceptable values, otherwise they are not. The mpfr precision is
12520 set to the precision of TYPE. We assume that function FUNC returns
12521 zero if the result could be calculated exactly within the requested
12525 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12526 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12529 tree result = NULL_TREE;
12533 /* To proceed, MPFR must exactly represent the target floating point
12534 format, which only happens when the target base equals two. */
12535 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12536 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12538 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12540 if (real_isfinite (ra)
12541 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12542 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12544 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12548 mpfr_init2 (m, prec);
12549 mpfr_from_real (m, ra, GMP_RNDN);
12550 mpfr_clear_flags ();
12551 inexact = func (m, m, GMP_RNDN);
12552 result = do_mpfr_ckconv (m, type, inexact);
12560 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12561 FUNC on it and return the resulting value as a tree with type TYPE.
12562 The mpfr precision is set to the precision of TYPE. We assume that
12563 function FUNC returns zero if the result could be calculated
12564 exactly within the requested precision. */
12567 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12568 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12570 tree result = NULL_TREE;
12575 /* To proceed, MPFR must exactly represent the target floating point
12576 format, which only happens when the target base equals two. */
12577 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12578 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12579 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12581 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12582 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12584 if (real_isfinite (ra1) && real_isfinite (ra2))
12586 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12590 mpfr_inits2 (prec, m1, m2, NULL);
12591 mpfr_from_real (m1, ra1, GMP_RNDN);
12592 mpfr_from_real (m2, ra2, GMP_RNDN);
12593 mpfr_clear_flags ();
12594 inexact = func (m1, m1, m2, GMP_RNDN);
12595 result = do_mpfr_ckconv (m1, type, inexact);
12596 mpfr_clears (m1, m2, NULL);
12603 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12604 FUNC on it and return the resulting value as a tree with type TYPE.
12605 The mpfr precision is set to the precision of TYPE. We assume that
12606 function FUNC returns zero if the result could be calculated
12607 exactly within the requested precision. */
12610 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12611 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12613 tree result = NULL_TREE;
12619 /* To proceed, MPFR must exactly represent the target floating point
12620 format, which only happens when the target base equals two. */
12621 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12622 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12623 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12624 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12626 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12627 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12628 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12630 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12632 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12636 mpfr_inits2 (prec, m1, m2, m3, NULL);
12637 mpfr_from_real (m1, ra1, GMP_RNDN);
12638 mpfr_from_real (m2, ra2, GMP_RNDN);
12639 mpfr_from_real (m3, ra3, GMP_RNDN);
12640 mpfr_clear_flags ();
12641 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12642 result = do_mpfr_ckconv (m1, type, inexact);
12643 mpfr_clears (m1, m2, m3, NULL);
12650 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12651 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12652 If ARG_SINP and ARG_COSP are NULL then the result is returned
12653 as a complex value.
12654 The type is taken from the type of ARG and is used for setting the
12655 precision of the calculation and results. */
12658 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12660 tree const type = TREE_TYPE (arg);
12661 tree result = NULL_TREE;
12665 /* To proceed, MPFR must exactly represent the target floating point
12666 format, which only happens when the target base equals two. */
12667 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12668 && TREE_CODE (arg) == REAL_CST
12669 && !TREE_OVERFLOW (arg))
12671 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12673 if (real_isfinite (ra))
12675 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12676 tree result_s, result_c;
12680 mpfr_inits2 (prec, m, ms, mc, NULL);
12681 mpfr_from_real (m, ra, GMP_RNDN);
12682 mpfr_clear_flags ();
12683 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12684 result_s = do_mpfr_ckconv (ms, type, inexact);
12685 result_c = do_mpfr_ckconv (mc, type, inexact);
12686 mpfr_clears (m, ms, mc, NULL);
12687 if (result_s && result_c)
12689 /* If we are to return in a complex value do so. */
12690 if (!arg_sinp && !arg_cosp)
12691 return build_complex (build_complex_type (type),
12692 result_c, result_s);
12694 /* Dereference the sin/cos pointer arguments. */
12695 arg_sinp = build_fold_indirect_ref (arg_sinp);
12696 arg_cosp = build_fold_indirect_ref (arg_cosp);
12697 /* Proceed if valid pointer type were passed in. */
12698 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12699 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12701 /* Set the values. */
12702 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12704 TREE_SIDE_EFFECTS (result_s) = 1;
12705 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12707 TREE_SIDE_EFFECTS (result_c) = 1;
12708 /* Combine the assignments into a compound expr. */
12709 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12710 result_s, result_c));
12718 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12719 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12720 two-argument mpfr order N Bessel function FUNC on them and return
12721 the resulting value as a tree with type TYPE. The mpfr precision
12722 is set to the precision of TYPE. We assume that function FUNC
12723 returns zero if the result could be calculated exactly within the
12724 requested precision. */
12726 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12727 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12728 const REAL_VALUE_TYPE *min, bool inclusive)
12730 tree result = NULL_TREE;
12735 /* To proceed, MPFR must exactly represent the target floating point
12736 format, which only happens when the target base equals two. */
12737 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12738 && host_integerp (arg1, 0)
12739 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12741 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12742 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12745 && real_isfinite (ra)
12746 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12748 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12752 mpfr_init2 (m, prec);
12753 mpfr_from_real (m, ra, GMP_RNDN);
12754 mpfr_clear_flags ();
12755 inexact = func (m, n, m, GMP_RNDN);
12756 result = do_mpfr_ckconv (m, type, inexact);
12764 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12765 the pointer *(ARG_QUO) and return the result. The type is taken
12766 from the type of ARG0 and is used for setting the precision of the
12767 calculation and results. */
12770 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12772 tree const type = TREE_TYPE (arg0);
12773 tree result = NULL_TREE;
12778 /* To proceed, MPFR must exactly represent the target floating point
12779 format, which only happens when the target base equals two. */
12780 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12781 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12782 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12784 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12785 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12787 if (real_isfinite (ra0) && real_isfinite (ra1))
12789 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12794 mpfr_inits2 (prec, m0, m1, NULL);
12795 mpfr_from_real (m0, ra0, GMP_RNDN);
12796 mpfr_from_real (m1, ra1, GMP_RNDN);
12797 mpfr_clear_flags ();
12798 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
12799 /* Remquo is independent of the rounding mode, so pass
12800 inexact=0 to do_mpfr_ckconv(). */
12801 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12802 mpfr_clears (m0, m1, NULL);
12805 /* MPFR calculates quo in the host's long so it may
12806 return more bits in quo than the target int can hold
12807 if sizeof(host long) > sizeof(target int). This can
12808 happen even for native compilers in LP64 mode. In
12809 these cases, modulo the quo value with the largest
12810 number that the target int can hold while leaving one
12811 bit for the sign. */
12812 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12813 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12815 /* Dereference the quo pointer argument. */
12816 arg_quo = build_fold_indirect_ref (arg_quo);
12817 /* Proceed iff a valid pointer type was passed in. */
12818 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12820 /* Set the value. */
12821 tree result_quo = fold_build2 (MODIFY_EXPR,
12822 TREE_TYPE (arg_quo), arg_quo,
12823 build_int_cst (NULL, integer_quo));
12824 TREE_SIDE_EFFECTS (result_quo) = 1;
12825 /* Combine the quo assignment with the rem. */
12826 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12827 result_quo, result_rem));
12835 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12836 resulting value as a tree with type TYPE. The mpfr precision is
12837 set to the precision of TYPE. We assume that this mpfr function
12838 returns zero if the result could be calculated exactly within the
12839 requested precision. In addition, the integer pointer represented
12840 by ARG_SG will be dereferenced and set to the appropriate signgam
12844 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12846 tree result = NULL_TREE;
12850 /* To proceed, MPFR must exactly represent the target floating point
12851 format, which only happens when the target base equals two. Also
12852 verify ARG is a constant and that ARG_SG is an int pointer. */
12853 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12854 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12855 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12856 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12858 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12860 /* In addition to NaN and Inf, the argument cannot be zero or a
12861 negative integer. */
12862 if (real_isfinite (ra)
12863 && ra->cl != rvc_zero
12864 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
12866 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12871 mpfr_init2 (m, prec);
12872 mpfr_from_real (m, ra, GMP_RNDN);
12873 mpfr_clear_flags ();
12874 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
12875 result_lg = do_mpfr_ckconv (m, type, inexact);
12881 /* Dereference the arg_sg pointer argument. */
12882 arg_sg = build_fold_indirect_ref (arg_sg);
12883 /* Assign the signgam value into *arg_sg. */
12884 result_sg = fold_build2 (MODIFY_EXPR,
12885 TREE_TYPE (arg_sg), arg_sg,
12886 build_int_cst (NULL, sg));
12887 TREE_SIDE_EFFECTS (result_sg) = 1;
12888 /* Combine the signgam assignment with the lgamma result. */
12889 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12890 result_sg, result_lg));