1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static void maybe_emit_free_warning (tree);
211 static tree fold_builtin_object_size (tree, tree);
212 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
213 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
214 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
215 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
216 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
217 enum built_in_function);
218 static bool init_target_chars (void);
220 static unsigned HOST_WIDE_INT target_newline;
221 static unsigned HOST_WIDE_INT target_percent;
222 static unsigned HOST_WIDE_INT target_c;
223 static unsigned HOST_WIDE_INT target_s;
224 static char target_percent_c[3];
225 static char target_percent_s[3];
226 static char target_percent_s_newline[4];
227 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
228 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
229 static tree do_mpfr_arg2 (tree, tree, tree,
230 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
231 static tree do_mpfr_arg3 (tree, tree, tree, tree,
232 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
233 static tree do_mpfr_sincos (tree, tree, tree);
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
240 /* Return true if NODE should be considered for inline expansion regardless
241 of the optimization level. This means whenever a function is invoked with
242 its "internal" name, which normally contains the prefix "__builtin". */
244 static bool called_as_built_in (tree node)
246 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
247 if (strncmp (name, "__builtin_", 10) == 0)
249 if (strncmp (name, "__sync_", 7) == 0)
254 /* Return the alignment in bits of EXP, an object.
255 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
256 guessed alignment e.g. from type alignment. */
259 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
264 if (handled_component_p (exp))
266 HOST_WIDE_INT bitsize, bitpos;
268 enum machine_mode mode;
269 int unsignedp, volatilep;
271 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
272 &mode, &unsignedp, &volatilep, true);
274 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
279 if (TREE_CODE (offset) == PLUS_EXPR)
281 next_offset = TREE_OPERAND (offset, 0);
282 offset = TREE_OPERAND (offset, 1);
286 if (host_integerp (offset, 1))
288 /* Any overflow in calculating offset_bits won't change
291 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
294 inner = MIN (inner, (offset_bits & -offset_bits));
296 else if (TREE_CODE (offset) == MULT_EXPR
297 && host_integerp (TREE_OPERAND (offset, 1), 1))
299 /* Any overflow in calculating offset_factor won't change
301 unsigned offset_factor
302 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
306 inner = MIN (inner, (offset_factor & -offset_factor));
310 inner = MIN (inner, BITS_PER_UNIT);
313 offset = next_offset;
317 align = MIN (inner, DECL_ALIGN (exp));
318 #ifdef CONSTANT_ALIGNMENT
319 else if (CONSTANT_CLASS_P (exp))
320 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
322 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
323 || TREE_CODE (exp) == INDIRECT_REF)
324 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
326 align = MIN (align, inner);
327 return MIN (align, max_align);
330 /* Return the alignment in bits of EXP, a pointer valued expression.
331 But don't return more than MAX_ALIGN no matter what.
332 The alignment returned is, by default, the alignment of the thing that
333 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
335 Otherwise, look at the expression to see if we can do better, i.e., if the
336 expression is actually pointing at an object whose alignment is tighter. */
339 get_pointer_alignment (tree exp, unsigned int max_align)
341 unsigned int align, inner;
343 /* We rely on TER to compute accurate alignment information. */
344 if (!(optimize && flag_tree_ter))
347 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
350 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
351 align = MIN (align, max_align);
355 switch (TREE_CODE (exp))
358 exp = TREE_OPERAND (exp, 0);
359 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
362 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
363 align = MIN (inner, max_align);
366 case POINTER_PLUS_EXPR:
367 /* If sum of pointer + int, restrict our maximum alignment to that
368 imposed by the integer. If not, we can't do any better than
370 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
373 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
374 & (max_align / BITS_PER_UNIT - 1))
378 exp = TREE_OPERAND (exp, 0);
382 /* See what we are pointing at and look at its alignment. */
383 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
391 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
392 way, because it could contain a zero byte in the middle.
393 TREE_STRING_LENGTH is the size of the character array, not the string.
395 ONLY_VALUE should be nonzero if the result is not going to be emitted
396 into the instruction stream and zero if it is going to be expanded.
397 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
398 is returned, otherwise NULL, since
399 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
400 evaluate the side-effects.
402 The value returned is of type `ssizetype'.
404 Unfortunately, string_constant can't access the values of const char
405 arrays with initializers, so neither can we do so here. */
408 c_strlen (tree src, int only_value)
411 HOST_WIDE_INT offset;
416 if (TREE_CODE (src) == COND_EXPR
417 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
421 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
422 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
423 if (tree_int_cst_equal (len1, len2))
427 if (TREE_CODE (src) == COMPOUND_EXPR
428 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
429 return c_strlen (TREE_OPERAND (src, 1), only_value);
431 src = string_constant (src, &offset_node);
435 max = TREE_STRING_LENGTH (src) - 1;
436 ptr = TREE_STRING_POINTER (src);
438 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
440 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
441 compute the offset to the following null if we don't know where to
442 start searching for it. */
445 for (i = 0; i < max; i++)
449 /* We don't know the starting offset, but we do know that the string
450 has no internal zero bytes. We can assume that the offset falls
451 within the bounds of the string; otherwise, the programmer deserves
452 what he gets. Subtract the offset from the length of the string,
453 and return that. This would perhaps not be valid if we were dealing
454 with named arrays in addition to literal string constants. */
456 return size_diffop (size_int (max), offset_node);
459 /* We have a known offset into the string. Start searching there for
460 a null character if we can represent it as a single HOST_WIDE_INT. */
461 if (offset_node == 0)
463 else if (! host_integerp (offset_node, 0))
466 offset = tree_low_cst (offset_node, 0);
468 /* If the offset is known to be out of bounds, warn, and call strlen at
470 if (offset < 0 || offset > max)
472 /* Suppress multiple warnings for propagated constant strings. */
473 if (! TREE_NO_WARNING (src))
475 warning (0, "offset outside bounds of constant string");
476 TREE_NO_WARNING (src) = 1;
481 /* Use strlen to search for the first zero byte. Since any strings
482 constructed with build_string will have nulls appended, we win even
483 if we get handed something like (char[4])"abcd".
485 Since OFFSET is our starting index into the string, no further
486 calculation is needed. */
487 return ssize_int (strlen (ptr + offset));
490 /* Return a char pointer for a C string if it is a string constant
491 or sum of string constant and integer constant. */
498 src = string_constant (src, &offset_node);
502 if (offset_node == 0)
503 return TREE_STRING_POINTER (src);
504 else if (!host_integerp (offset_node, 1)
505 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
508 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
511 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
512 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
515 c_readstr (const char *str, enum machine_mode mode)
521 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
526 for (i = 0; i < GET_MODE_SIZE (mode); i++)
529 if (WORDS_BIG_ENDIAN)
530 j = GET_MODE_SIZE (mode) - i - 1;
531 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
532 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
533 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
535 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
538 ch = (unsigned char) str[i];
539 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
541 return immed_double_const (c[0], c[1], mode);
544 /* Cast a target constant CST to target CHAR and if that value fits into
545 host char type, return zero and put that value into variable pointed to by
549 target_char_cast (tree cst, char *p)
551 unsigned HOST_WIDE_INT val, hostval;
553 if (!host_integerp (cst, 1)
554 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
557 val = tree_low_cst (cst, 1);
558 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
559 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
562 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
563 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
572 /* Similar to save_expr, but assumes that arbitrary code is not executed
573 in between the multiple evaluations. In particular, we assume that a
574 non-addressable local variable will not be modified. */
577 builtin_save_expr (tree exp)
579 if (TREE_ADDRESSABLE (exp) == 0
580 && (TREE_CODE (exp) == PARM_DECL
581 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
584 return save_expr (exp);
587 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
588 times to get the address of either a higher stack frame, or a return
589 address located within it (depending on FNDECL_CODE). */
592 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
596 #ifdef INITIAL_FRAME_ADDRESS_RTX
597 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
601 /* For a zero count with __builtin_return_address, we don't care what
602 frame address we return, because target-specific definitions will
603 override us. Therefore frame pointer elimination is OK, and using
604 the soft frame pointer is OK.
606 For a nonzero count, or a zero count with __builtin_frame_address,
607 we require a stable offset from the current frame pointer to the
608 previous one, so we must use the hard frame pointer, and
609 we must disable frame pointer elimination. */
610 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
611 tem = frame_pointer_rtx;
614 tem = hard_frame_pointer_rtx;
616 /* Tell reload not to eliminate the frame pointer. */
617 crtl->accesses_prior_frames = 1;
621 /* Some machines need special handling before we can access
622 arbitrary frames. For example, on the SPARC, we must first flush
623 all register windows to the stack. */
624 #ifdef SETUP_FRAME_ADDRESSES
626 SETUP_FRAME_ADDRESSES ();
629 /* On the SPARC, the return address is not in the frame, it is in a
630 register. There is no way to access it off of the current frame
631 pointer, but it can be accessed off the previous frame pointer by
632 reading the value from the register window save area. */
633 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
634 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
638 /* Scan back COUNT frames to the specified frame. */
639 for (i = 0; i < count; i++)
641 /* Assume the dynamic chain pointer is in the word that the
642 frame address points to, unless otherwise specified. */
643 #ifdef DYNAMIC_CHAIN_ADDRESS
644 tem = DYNAMIC_CHAIN_ADDRESS (tem);
646 tem = memory_address (Pmode, tem);
647 tem = gen_frame_mem (Pmode, tem);
648 tem = copy_to_reg (tem);
651 /* For __builtin_frame_address, return what we've got. But, on
652 the SPARC for example, we may have to add a bias. */
653 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
654 #ifdef FRAME_ADDR_RTX
655 return FRAME_ADDR_RTX (tem);
660 /* For __builtin_return_address, get the return address from that frame. */
661 #ifdef RETURN_ADDR_RTX
662 tem = RETURN_ADDR_RTX (count, tem);
664 tem = memory_address (Pmode,
665 plus_constant (tem, GET_MODE_SIZE (Pmode)));
666 tem = gen_frame_mem (Pmode, tem);
671 /* Alias set used for setjmp buffer. */
672 static alias_set_type setjmp_alias_set = -1;
674 /* Construct the leading half of a __builtin_setjmp call. Control will
675 return to RECEIVER_LABEL. This is also called directly by the SJLJ
676 exception handling code. */
679 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
681 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
685 if (setjmp_alias_set == -1)
686 setjmp_alias_set = new_alias_set ();
688 buf_addr = convert_memory_address (Pmode, buf_addr);
690 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
692 /* We store the frame pointer and the address of receiver_label in
693 the buffer and use the rest of it for the stack save area, which
694 is machine-dependent. */
696 mem = gen_rtx_MEM (Pmode, buf_addr);
697 set_mem_alias_set (mem, setjmp_alias_set);
698 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
700 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
701 set_mem_alias_set (mem, setjmp_alias_set);
703 emit_move_insn (validize_mem (mem),
704 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
706 stack_save = gen_rtx_MEM (sa_mode,
707 plus_constant (buf_addr,
708 2 * GET_MODE_SIZE (Pmode)));
709 set_mem_alias_set (stack_save, setjmp_alias_set);
710 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
712 /* If there is further processing to do, do it. */
713 #ifdef HAVE_builtin_setjmp_setup
714 if (HAVE_builtin_setjmp_setup)
715 emit_insn (gen_builtin_setjmp_setup (buf_addr));
718 /* Tell optimize_save_area_alloca that extra work is going to
719 need to go on during alloca. */
720 cfun->calls_setjmp = 1;
722 /* We have a nonlocal label. */
723 cfun->has_nonlocal_label = 1;
726 /* Construct the trailing part of a __builtin_setjmp call. This is
727 also called directly by the SJLJ exception handling code. */
730 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
732 /* Clobber the FP when we get here, so we have to make sure it's
733 marked as used by this function. */
734 emit_use (hard_frame_pointer_rtx);
736 /* Mark the static chain as clobbered here so life information
737 doesn't get messed up for it. */
738 emit_clobber (static_chain_rtx);
740 /* Now put in the code to restore the frame pointer, and argument
741 pointer, if needed. */
742 #ifdef HAVE_nonlocal_goto
743 if (! HAVE_nonlocal_goto)
746 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
747 /* This might change the hard frame pointer in ways that aren't
748 apparent to early optimization passes, so force a clobber. */
749 emit_clobber (hard_frame_pointer_rtx);
752 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
753 if (fixed_regs[ARG_POINTER_REGNUM])
755 #ifdef ELIMINABLE_REGS
757 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
759 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
760 if (elim_regs[i].from == ARG_POINTER_REGNUM
761 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
764 if (i == ARRAY_SIZE (elim_regs))
767 /* Now restore our arg pointer from the address at which it
768 was saved in our stack frame. */
769 emit_move_insn (crtl->args.internal_arg_pointer,
770 copy_to_reg (get_arg_pointer_save_area ()));
775 #ifdef HAVE_builtin_setjmp_receiver
776 if (HAVE_builtin_setjmp_receiver)
777 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
780 #ifdef HAVE_nonlocal_goto_receiver
781 if (HAVE_nonlocal_goto_receiver)
782 emit_insn (gen_nonlocal_goto_receiver ());
787 /* We must not allow the code we just generated to be reordered by
788 scheduling. Specifically, the update of the frame pointer must
789 happen immediately, not later. */
790 emit_insn (gen_blockage ());
793 /* __builtin_longjmp is passed a pointer to an array of five words (not
794 all will be used on all machines). It operates similarly to the C
795 library function of the same name, but is more efficient. Much of
796 the code below is copied from the handling of non-local gotos. */
799 expand_builtin_longjmp (rtx buf_addr, rtx value)
801 rtx fp, lab, stack, insn, last;
802 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
804 /* DRAP is needed for stack realign if longjmp is expanded to current
806 if (SUPPORTS_STACK_ALIGNMENT)
807 crtl->need_drap = true;
809 if (setjmp_alias_set == -1)
810 setjmp_alias_set = new_alias_set ();
812 buf_addr = convert_memory_address (Pmode, buf_addr);
814 buf_addr = force_reg (Pmode, buf_addr);
816 /* We used to store value in static_chain_rtx, but that fails if pointers
817 are smaller than integers. We instead require that the user must pass
818 a second argument of 1, because that is what builtin_setjmp will
819 return. This also makes EH slightly more efficient, since we are no
820 longer copying around a value that we don't care about. */
821 gcc_assert (value == const1_rtx);
823 last = get_last_insn ();
824 #ifdef HAVE_builtin_longjmp
825 if (HAVE_builtin_longjmp)
826 emit_insn (gen_builtin_longjmp (buf_addr));
830 fp = gen_rtx_MEM (Pmode, buf_addr);
831 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
832 GET_MODE_SIZE (Pmode)));
834 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
835 2 * GET_MODE_SIZE (Pmode)));
836 set_mem_alias_set (fp, setjmp_alias_set);
837 set_mem_alias_set (lab, setjmp_alias_set);
838 set_mem_alias_set (stack, setjmp_alias_set);
840 /* Pick up FP, label, and SP from the block and jump. This code is
841 from expand_goto in stmt.c; see there for detailed comments. */
842 #ifdef HAVE_nonlocal_goto
843 if (HAVE_nonlocal_goto)
844 /* We have to pass a value to the nonlocal_goto pattern that will
845 get copied into the static_chain pointer, but it does not matter
846 what that value is, because builtin_setjmp does not use it. */
847 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
851 lab = copy_to_reg (lab);
853 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
854 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
856 emit_move_insn (hard_frame_pointer_rtx, fp);
857 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
859 emit_use (hard_frame_pointer_rtx);
860 emit_use (stack_pointer_rtx);
861 emit_indirect_jump (lab);
865 /* Search backwards and mark the jump insn as a non-local goto.
866 Note that this precludes the use of __builtin_longjmp to a
867 __builtin_setjmp target in the same function. However, we've
868 already cautioned the user that these functions are for
869 internal exception handling use only. */
870 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
872 gcc_assert (insn != last);
876 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
879 else if (CALL_P (insn))
884 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
885 and the address of the save area. */
888 expand_builtin_nonlocal_goto (tree exp)
890 tree t_label, t_save_area;
891 rtx r_label, r_save_area, r_fp, r_sp, insn;
893 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
896 t_label = CALL_EXPR_ARG (exp, 0);
897 t_save_area = CALL_EXPR_ARG (exp, 1);
899 r_label = expand_normal (t_label);
900 r_label = convert_memory_address (Pmode, r_label);
901 r_save_area = expand_normal (t_save_area);
902 r_save_area = convert_memory_address (Pmode, r_save_area);
903 /* Copy the address of the save location to a register just in case it was based
904 on the frame pointer. */
905 r_save_area = copy_to_reg (r_save_area);
906 r_fp = gen_rtx_MEM (Pmode, r_save_area);
907 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
908 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
910 crtl->has_nonlocal_goto = 1;
912 #ifdef HAVE_nonlocal_goto
913 /* ??? We no longer need to pass the static chain value, afaik. */
914 if (HAVE_nonlocal_goto)
915 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
919 r_label = copy_to_reg (r_label);
921 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
922 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
924 /* Restore frame pointer for containing function.
925 This sets the actual hard register used for the frame pointer
926 to the location of the function's incoming static chain info.
927 The non-local goto handler will then adjust it to contain the
928 proper value and reload the argument pointer, if needed. */
929 emit_move_insn (hard_frame_pointer_rtx, r_fp);
930 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
932 /* USE of hard_frame_pointer_rtx added for consistency;
933 not clear if really needed. */
934 emit_use (hard_frame_pointer_rtx);
935 emit_use (stack_pointer_rtx);
937 /* If the architecture is using a GP register, we must
938 conservatively assume that the target function makes use of it.
939 The prologue of functions with nonlocal gotos must therefore
940 initialize the GP register to the appropriate value, and we
941 must then make sure that this value is live at the point
942 of the jump. (Note that this doesn't necessarily apply
943 to targets with a nonlocal_goto pattern; they are free
944 to implement it in their own way. Note also that this is
945 a no-op if the GP register is a global invariant.) */
946 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
947 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
948 emit_use (pic_offset_table_rtx);
950 emit_indirect_jump (r_label);
953 /* Search backwards to the jump insn and mark it as a
955 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
959 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
962 else if (CALL_P (insn))
969 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
970 (not all will be used on all machines) that was passed to __builtin_setjmp.
971 It updates the stack pointer in that block to correspond to the current
975 expand_builtin_update_setjmp_buf (rtx buf_addr)
977 enum machine_mode sa_mode = Pmode;
981 #ifdef HAVE_save_stack_nonlocal
982 if (HAVE_save_stack_nonlocal)
983 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
985 #ifdef STACK_SAVEAREA_MODE
986 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
990 = gen_rtx_MEM (sa_mode,
993 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
997 emit_insn (gen_setjmp ());
1000 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1003 /* Expand a call to __builtin_prefetch. For a target that does not support
1004 data prefetch, evaluate the memory address argument in case it has side
1008 expand_builtin_prefetch (tree exp)
1010 tree arg0, arg1, arg2;
1014 if (!validate_arglist (exp, POINTER_TYPE, 0))
1017 arg0 = CALL_EXPR_ARG (exp, 0);
1019 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1020 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1022 nargs = call_expr_nargs (exp);
1024 arg1 = CALL_EXPR_ARG (exp, 1);
1026 arg1 = integer_zero_node;
1028 arg2 = CALL_EXPR_ARG (exp, 2);
1030 arg2 = build_int_cst (NULL_TREE, 3);
1032 /* Argument 0 is an address. */
1033 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1035 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1036 if (TREE_CODE (arg1) != INTEGER_CST)
1038 error ("second argument to %<__builtin_prefetch%> must be a constant");
1039 arg1 = integer_zero_node;
1041 op1 = expand_normal (arg1);
1042 /* Argument 1 must be either zero or one. */
1043 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1045 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1050 /* Argument 2 (locality) must be a compile-time constant int. */
1051 if (TREE_CODE (arg2) != INTEGER_CST)
1053 error ("third argument to %<__builtin_prefetch%> must be a constant");
1054 arg2 = integer_zero_node;
1056 op2 = expand_normal (arg2);
1057 /* Argument 2 must be 0, 1, 2, or 3. */
1058 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1060 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1064 #ifdef HAVE_prefetch
1067 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1069 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1070 || (GET_MODE (op0) != Pmode))
1072 op0 = convert_memory_address (Pmode, op0);
1073 op0 = force_reg (Pmode, op0);
1075 emit_insn (gen_prefetch (op0, op1, op2));
1079 /* Don't do anything with direct references to volatile memory, but
1080 generate code to handle other side effects. */
1081 if (!MEM_P (op0) && side_effects_p (op0))
1085 /* Get a MEM rtx for expression EXP which is the address of an operand
1086 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1087 the maximum length of the block of memory that might be accessed or
1091 get_memory_rtx (tree exp, tree len)
1093 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1094 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1096 /* Get an expression we can use to find the attributes to assign to MEM.
1097 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1098 we can. First remove any nops. */
1099 while (CONVERT_EXPR_P (exp)
1100 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1101 exp = TREE_OPERAND (exp, 0);
1103 if (TREE_CODE (exp) == ADDR_EXPR)
1104 exp = TREE_OPERAND (exp, 0);
1105 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1106 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1110 /* Honor attributes derived from exp, except for the alias set
1111 (as builtin stringops may alias with anything) and the size
1112 (as stringops may access multiple array elements). */
1115 set_mem_attributes (mem, exp, 0);
1117 /* Allow the string and memory builtins to overflow from one
1118 field into another, see http://gcc.gnu.org/PR23561.
1119 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1120 memory accessed by the string or memory builtin will fit
1121 within the field. */
1122 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1124 tree mem_expr = MEM_EXPR (mem);
1125 HOST_WIDE_INT offset = -1, length = -1;
1128 while (TREE_CODE (inner) == ARRAY_REF
1129 || CONVERT_EXPR_P (inner)
1130 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1131 || TREE_CODE (inner) == SAVE_EXPR)
1132 inner = TREE_OPERAND (inner, 0);
1134 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1136 if (MEM_OFFSET (mem)
1137 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1138 offset = INTVAL (MEM_OFFSET (mem));
1140 if (offset >= 0 && len && host_integerp (len, 0))
1141 length = tree_low_cst (len, 0);
1143 while (TREE_CODE (inner) == COMPONENT_REF)
1145 tree field = TREE_OPERAND (inner, 1);
1146 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1147 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1149 /* Bitfields are generally not byte-addressable. */
1150 gcc_assert (!DECL_BIT_FIELD (field)
1151 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1152 % BITS_PER_UNIT) == 0
1153 && host_integerp (DECL_SIZE (field), 0)
1154 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1155 % BITS_PER_UNIT) == 0));
1157 /* If we can prove that the memory starting at XEXP (mem, 0) and
1158 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1159 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1160 fields without DECL_SIZE_UNIT like flexible array members. */
1162 && DECL_SIZE_UNIT (field)
1163 && host_integerp (DECL_SIZE_UNIT (field), 0))
1166 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1169 && offset + length <= size)
1174 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1175 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1176 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1184 mem_expr = TREE_OPERAND (mem_expr, 0);
1185 inner = TREE_OPERAND (inner, 0);
1188 if (mem_expr == NULL)
1190 if (mem_expr != MEM_EXPR (mem))
1192 set_mem_expr (mem, mem_expr);
1193 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1196 set_mem_alias_set (mem, 0);
1197 set_mem_size (mem, NULL_RTX);
1203 /* Built-in functions to perform an untyped call and return. */
1205 /* For each register that may be used for calling a function, this
1206 gives a mode used to copy the register's value. VOIDmode indicates
1207 the register is not used for calling a function. If the machine
1208 has register windows, this gives only the outbound registers.
1209 INCOMING_REGNO gives the corresponding inbound register. */
1210 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1212 /* For each register that may be used for returning values, this gives
1213 a mode used to copy the register's value. VOIDmode indicates the
1214 register is not used for returning values. If the machine has
1215 register windows, this gives only the outbound registers.
1216 INCOMING_REGNO gives the corresponding inbound register. */
1217 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1219 /* For each register that may be used for calling a function, this
1220 gives the offset of that register into the block returned by
1221 __builtin_apply_args. 0 indicates that the register is not
1222 used for calling a function. */
1223 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1225 /* Return the size required for the block returned by __builtin_apply_args,
1226 and initialize apply_args_mode. */
1229 apply_args_size (void)
1231 static int size = -1;
1234 enum machine_mode mode;
1236 /* The values computed by this function never change. */
1239 /* The first value is the incoming arg-pointer. */
1240 size = GET_MODE_SIZE (Pmode);
1242 /* The second value is the structure value address unless this is
1243 passed as an "invisible" first argument. */
1244 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1245 size += GET_MODE_SIZE (Pmode);
1247 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1248 if (FUNCTION_ARG_REGNO_P (regno))
1250 mode = reg_raw_mode[regno];
1252 gcc_assert (mode != VOIDmode);
1254 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1255 if (size % align != 0)
1256 size = CEIL (size, align) * align;
1257 apply_args_reg_offset[regno] = size;
1258 size += GET_MODE_SIZE (mode);
1259 apply_args_mode[regno] = mode;
1263 apply_args_mode[regno] = VOIDmode;
1264 apply_args_reg_offset[regno] = 0;
1270 /* Return the size required for the block returned by __builtin_apply,
1271 and initialize apply_result_mode. */
1274 apply_result_size (void)
1276 static int size = -1;
1278 enum machine_mode mode;
1280 /* The values computed by this function never change. */
1285 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1286 if (FUNCTION_VALUE_REGNO_P (regno))
1288 mode = reg_raw_mode[regno];
1290 gcc_assert (mode != VOIDmode);
1292 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1293 if (size % align != 0)
1294 size = CEIL (size, align) * align;
1295 size += GET_MODE_SIZE (mode);
1296 apply_result_mode[regno] = mode;
1299 apply_result_mode[regno] = VOIDmode;
1301 /* Allow targets that use untyped_call and untyped_return to override
1302 the size so that machine-specific information can be stored here. */
1303 #ifdef APPLY_RESULT_SIZE
1304 size = APPLY_RESULT_SIZE;
1310 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1311 /* Create a vector describing the result block RESULT. If SAVEP is true,
1312 the result block is used to save the values; otherwise it is used to
1313 restore the values. */
1316 result_vector (int savep, rtx result)
1318 int regno, size, align, nelts;
1319 enum machine_mode mode;
1321 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325 if ((mode = apply_result_mode[regno]) != VOIDmode)
1327 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1328 if (size % align != 0)
1329 size = CEIL (size, align) * align;
1330 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1331 mem = adjust_address (result, mode, size);
1332 savevec[nelts++] = (savep
1333 ? gen_rtx_SET (VOIDmode, mem, reg)
1334 : gen_rtx_SET (VOIDmode, reg, mem));
1335 size += GET_MODE_SIZE (mode);
1337 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1339 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1341 /* Save the state required to perform an untyped call with the same
1342 arguments as were passed to the current function. */
1345 expand_builtin_apply_args_1 (void)
1348 int size, align, regno;
1349 enum machine_mode mode;
1350 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1352 /* Create a block where the arg-pointer, structure value address,
1353 and argument registers can be saved. */
1354 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1356 /* Walk past the arg-pointer and structure value address. */
1357 size = GET_MODE_SIZE (Pmode);
1358 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1359 size += GET_MODE_SIZE (Pmode);
1361 /* Save each register used in calling a function to the block. */
1362 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1363 if ((mode = apply_args_mode[regno]) != VOIDmode)
1365 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1366 if (size % align != 0)
1367 size = CEIL (size, align) * align;
1369 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1371 emit_move_insn (adjust_address (registers, mode, size), tem);
1372 size += GET_MODE_SIZE (mode);
1375 /* Save the arg pointer to the block. */
1376 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1377 #ifdef STACK_GROWS_DOWNWARD
1378 /* We need the pointer as the caller actually passed them to us, not
1379 as we might have pretended they were passed. Make sure it's a valid
1380 operand, as emit_move_insn isn't expected to handle a PLUS. */
1382 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1385 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1387 size = GET_MODE_SIZE (Pmode);
1389 /* Save the structure value address unless this is passed as an
1390 "invisible" first argument. */
1391 if (struct_incoming_value)
1393 emit_move_insn (adjust_address (registers, Pmode, size),
1394 copy_to_reg (struct_incoming_value));
1395 size += GET_MODE_SIZE (Pmode);
1398 /* Return the address of the block. */
1399 return copy_addr_to_reg (XEXP (registers, 0));
1402 /* __builtin_apply_args returns block of memory allocated on
1403 the stack into which is stored the arg pointer, structure
1404 value address, static chain, and all the registers that might
1405 possibly be used in performing a function call. The code is
1406 moved to the start of the function so the incoming values are
1410 expand_builtin_apply_args (void)
1412 /* Don't do __builtin_apply_args more than once in a function.
1413 Save the result of the first call and reuse it. */
1414 if (apply_args_value != 0)
1415 return apply_args_value;
1417 /* When this function is called, it means that registers must be
1418 saved on entry to this function. So we migrate the
1419 call to the first insn of this function. */
1424 temp = expand_builtin_apply_args_1 ();
1428 apply_args_value = temp;
1430 /* Put the insns after the NOTE that starts the function.
1431 If this is inside a start_sequence, make the outer-level insn
1432 chain current, so the code is placed at the start of the
1434 push_topmost_sequence ();
1435 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1436 pop_topmost_sequence ();
1441 /* Perform an untyped call and save the state required to perform an
1442 untyped return of whatever value was returned by the given function. */
1445 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1447 int size, align, regno;
1448 enum machine_mode mode;
1449 rtx incoming_args, result, reg, dest, src, call_insn;
1450 rtx old_stack_level = 0;
1451 rtx call_fusage = 0;
1452 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1454 arguments = convert_memory_address (Pmode, arguments);
1456 /* Create a block where the return registers can be saved. */
1457 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1459 /* Fetch the arg pointer from the ARGUMENTS block. */
1460 incoming_args = gen_reg_rtx (Pmode);
1461 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1462 #ifndef STACK_GROWS_DOWNWARD
1463 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1464 incoming_args, 0, OPTAB_LIB_WIDEN);
1467 /* Push a new argument block and copy the arguments. Do not allow
1468 the (potential) memcpy call below to interfere with our stack
1470 do_pending_stack_adjust ();
1473 /* Save the stack with nonlocal if available. */
1474 #ifdef HAVE_save_stack_nonlocal
1475 if (HAVE_save_stack_nonlocal)
1476 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1479 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1481 /* Allocate a block of memory onto the stack and copy the memory
1482 arguments to the outgoing arguments address. */
1483 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1485 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1486 may have already set current_function_calls_alloca to true.
1487 current_function_calls_alloca won't be set if argsize is zero,
1488 so we have to guarantee need_drap is true here. */
1489 if (SUPPORTS_STACK_ALIGNMENT)
1490 crtl->need_drap = true;
1492 dest = virtual_outgoing_args_rtx;
1493 #ifndef STACK_GROWS_DOWNWARD
1494 if (GET_CODE (argsize) == CONST_INT)
1495 dest = plus_constant (dest, -INTVAL (argsize));
1497 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1499 dest = gen_rtx_MEM (BLKmode, dest);
1500 set_mem_align (dest, PARM_BOUNDARY);
1501 src = gen_rtx_MEM (BLKmode, incoming_args);
1502 set_mem_align (src, PARM_BOUNDARY);
1503 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1505 /* Refer to the argument block. */
1507 arguments = gen_rtx_MEM (BLKmode, arguments);
1508 set_mem_align (arguments, PARM_BOUNDARY);
1510 /* Walk past the arg-pointer and structure value address. */
1511 size = GET_MODE_SIZE (Pmode);
1513 size += GET_MODE_SIZE (Pmode);
1515 /* Restore each of the registers previously saved. Make USE insns
1516 for each of these registers for use in making the call. */
1517 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1518 if ((mode = apply_args_mode[regno]) != VOIDmode)
1520 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1521 if (size % align != 0)
1522 size = CEIL (size, align) * align;
1523 reg = gen_rtx_REG (mode, regno);
1524 emit_move_insn (reg, adjust_address (arguments, mode, size));
1525 use_reg (&call_fusage, reg);
1526 size += GET_MODE_SIZE (mode);
1529 /* Restore the structure value address unless this is passed as an
1530 "invisible" first argument. */
1531 size = GET_MODE_SIZE (Pmode);
1534 rtx value = gen_reg_rtx (Pmode);
1535 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1536 emit_move_insn (struct_value, value);
1537 if (REG_P (struct_value))
1538 use_reg (&call_fusage, struct_value);
1539 size += GET_MODE_SIZE (Pmode);
1542 /* All arguments and registers used for the call are set up by now! */
1543 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1545 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1546 and we don't want to load it into a register as an optimization,
1547 because prepare_call_address already did it if it should be done. */
1548 if (GET_CODE (function) != SYMBOL_REF)
1549 function = memory_address (FUNCTION_MODE, function);
1551 /* Generate the actual call instruction and save the return value. */
1552 #ifdef HAVE_untyped_call
1553 if (HAVE_untyped_call)
1554 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1555 result, result_vector (1, result)));
1558 #ifdef HAVE_call_value
1559 if (HAVE_call_value)
1563 /* Locate the unique return register. It is not possible to
1564 express a call that sets more than one return register using
1565 call_value; use untyped_call for that. In fact, untyped_call
1566 only needs to save the return registers in the given block. */
1567 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1568 if ((mode = apply_result_mode[regno]) != VOIDmode)
1570 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1572 valreg = gen_rtx_REG (mode, regno);
1575 emit_call_insn (GEN_CALL_VALUE (valreg,
1576 gen_rtx_MEM (FUNCTION_MODE, function),
1577 const0_rtx, NULL_RTX, const0_rtx));
1579 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1585 /* Find the CALL insn we just emitted, and attach the register usage
1587 call_insn = last_call_insn ();
1588 add_function_usage_to (call_insn, call_fusage);
1590 /* Restore the stack. */
1591 #ifdef HAVE_save_stack_nonlocal
1592 if (HAVE_save_stack_nonlocal)
1593 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1596 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1600 /* Return the address of the result block. */
1601 result = copy_addr_to_reg (XEXP (result, 0));
1602 return convert_memory_address (ptr_mode, result);
1605 /* Perform an untyped return. */
1608 expand_builtin_return (rtx result)
1610 int size, align, regno;
1611 enum machine_mode mode;
1613 rtx call_fusage = 0;
1615 result = convert_memory_address (Pmode, result);
1617 apply_result_size ();
1618 result = gen_rtx_MEM (BLKmode, result);
1620 #ifdef HAVE_untyped_return
1621 if (HAVE_untyped_return)
1623 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1629 /* Restore the return value and note that each value is used. */
1631 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1632 if ((mode = apply_result_mode[regno]) != VOIDmode)
1634 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1635 if (size % align != 0)
1636 size = CEIL (size, align) * align;
1637 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1638 emit_move_insn (reg, adjust_address (result, mode, size));
1640 push_to_sequence (call_fusage);
1642 call_fusage = get_insns ();
1644 size += GET_MODE_SIZE (mode);
1647 /* Put the USE insns before the return. */
1648 emit_insn (call_fusage);
1650 /* Return whatever values was restored by jumping directly to the end
1652 expand_naked_return ();
1655 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1657 static enum type_class
1658 type_to_class (tree type)
1660 switch (TREE_CODE (type))
1662 case VOID_TYPE: return void_type_class;
1663 case INTEGER_TYPE: return integer_type_class;
1664 case ENUMERAL_TYPE: return enumeral_type_class;
1665 case BOOLEAN_TYPE: return boolean_type_class;
1666 case POINTER_TYPE: return pointer_type_class;
1667 case REFERENCE_TYPE: return reference_type_class;
1668 case OFFSET_TYPE: return offset_type_class;
1669 case REAL_TYPE: return real_type_class;
1670 case COMPLEX_TYPE: return complex_type_class;
1671 case FUNCTION_TYPE: return function_type_class;
1672 case METHOD_TYPE: return method_type_class;
1673 case RECORD_TYPE: return record_type_class;
1675 case QUAL_UNION_TYPE: return union_type_class;
1676 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1677 ? string_type_class : array_type_class);
1678 case LANG_TYPE: return lang_type_class;
1679 default: return no_type_class;
1683 /* Expand a call EXP to __builtin_classify_type. */
1686 expand_builtin_classify_type (tree exp)
1688 if (call_expr_nargs (exp))
1689 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1690 return GEN_INT (no_type_class);
1693 /* This helper macro, meant to be used in mathfn_built_in below,
1694 determines which among a set of three builtin math functions is
1695 appropriate for a given type mode. The `F' and `L' cases are
1696 automatically generated from the `double' case. */
1697 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1698 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1699 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1700 fcodel = BUILT_IN_MATHFN##L ; break;
1701 /* Similar to above, but appends _R after any F/L suffix. */
1702 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1703 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1704 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1705 fcodel = BUILT_IN_MATHFN##L_R ; break;
1707 /* Return mathematic function equivalent to FN but operating directly
1708 on TYPE, if available. If IMPLICIT is true find the function in
1709 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1710 can't do the conversion, return zero. */
1713 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1715 tree const *const fn_arr
1716 = implicit ? implicit_built_in_decls : built_in_decls;
1717 enum built_in_function fcode, fcodef, fcodel;
1721 CASE_MATHFN (BUILT_IN_ACOS)
1722 CASE_MATHFN (BUILT_IN_ACOSH)
1723 CASE_MATHFN (BUILT_IN_ASIN)
1724 CASE_MATHFN (BUILT_IN_ASINH)
1725 CASE_MATHFN (BUILT_IN_ATAN)
1726 CASE_MATHFN (BUILT_IN_ATAN2)
1727 CASE_MATHFN (BUILT_IN_ATANH)
1728 CASE_MATHFN (BUILT_IN_CBRT)
1729 CASE_MATHFN (BUILT_IN_CEIL)
1730 CASE_MATHFN (BUILT_IN_CEXPI)
1731 CASE_MATHFN (BUILT_IN_COPYSIGN)
1732 CASE_MATHFN (BUILT_IN_COS)
1733 CASE_MATHFN (BUILT_IN_COSH)
1734 CASE_MATHFN (BUILT_IN_DREM)
1735 CASE_MATHFN (BUILT_IN_ERF)
1736 CASE_MATHFN (BUILT_IN_ERFC)
1737 CASE_MATHFN (BUILT_IN_EXP)
1738 CASE_MATHFN (BUILT_IN_EXP10)
1739 CASE_MATHFN (BUILT_IN_EXP2)
1740 CASE_MATHFN (BUILT_IN_EXPM1)
1741 CASE_MATHFN (BUILT_IN_FABS)
1742 CASE_MATHFN (BUILT_IN_FDIM)
1743 CASE_MATHFN (BUILT_IN_FLOOR)
1744 CASE_MATHFN (BUILT_IN_FMA)
1745 CASE_MATHFN (BUILT_IN_FMAX)
1746 CASE_MATHFN (BUILT_IN_FMIN)
1747 CASE_MATHFN (BUILT_IN_FMOD)
1748 CASE_MATHFN (BUILT_IN_FREXP)
1749 CASE_MATHFN (BUILT_IN_GAMMA)
1750 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1751 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1752 CASE_MATHFN (BUILT_IN_HYPOT)
1753 CASE_MATHFN (BUILT_IN_ILOGB)
1754 CASE_MATHFN (BUILT_IN_INF)
1755 CASE_MATHFN (BUILT_IN_ISINF)
1756 CASE_MATHFN (BUILT_IN_J0)
1757 CASE_MATHFN (BUILT_IN_J1)
1758 CASE_MATHFN (BUILT_IN_JN)
1759 CASE_MATHFN (BUILT_IN_LCEIL)
1760 CASE_MATHFN (BUILT_IN_LDEXP)
1761 CASE_MATHFN (BUILT_IN_LFLOOR)
1762 CASE_MATHFN (BUILT_IN_LGAMMA)
1763 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1764 CASE_MATHFN (BUILT_IN_LLCEIL)
1765 CASE_MATHFN (BUILT_IN_LLFLOOR)
1766 CASE_MATHFN (BUILT_IN_LLRINT)
1767 CASE_MATHFN (BUILT_IN_LLROUND)
1768 CASE_MATHFN (BUILT_IN_LOG)
1769 CASE_MATHFN (BUILT_IN_LOG10)
1770 CASE_MATHFN (BUILT_IN_LOG1P)
1771 CASE_MATHFN (BUILT_IN_LOG2)
1772 CASE_MATHFN (BUILT_IN_LOGB)
1773 CASE_MATHFN (BUILT_IN_LRINT)
1774 CASE_MATHFN (BUILT_IN_LROUND)
1775 CASE_MATHFN (BUILT_IN_MODF)
1776 CASE_MATHFN (BUILT_IN_NAN)
1777 CASE_MATHFN (BUILT_IN_NANS)
1778 CASE_MATHFN (BUILT_IN_NEARBYINT)
1779 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1780 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1781 CASE_MATHFN (BUILT_IN_POW)
1782 CASE_MATHFN (BUILT_IN_POWI)
1783 CASE_MATHFN (BUILT_IN_POW10)
1784 CASE_MATHFN (BUILT_IN_REMAINDER)
1785 CASE_MATHFN (BUILT_IN_REMQUO)
1786 CASE_MATHFN (BUILT_IN_RINT)
1787 CASE_MATHFN (BUILT_IN_ROUND)
1788 CASE_MATHFN (BUILT_IN_SCALB)
1789 CASE_MATHFN (BUILT_IN_SCALBLN)
1790 CASE_MATHFN (BUILT_IN_SCALBN)
1791 CASE_MATHFN (BUILT_IN_SIGNBIT)
1792 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1793 CASE_MATHFN (BUILT_IN_SIN)
1794 CASE_MATHFN (BUILT_IN_SINCOS)
1795 CASE_MATHFN (BUILT_IN_SINH)
1796 CASE_MATHFN (BUILT_IN_SQRT)
1797 CASE_MATHFN (BUILT_IN_TAN)
1798 CASE_MATHFN (BUILT_IN_TANH)
1799 CASE_MATHFN (BUILT_IN_TGAMMA)
1800 CASE_MATHFN (BUILT_IN_TRUNC)
1801 CASE_MATHFN (BUILT_IN_Y0)
1802 CASE_MATHFN (BUILT_IN_Y1)
1803 CASE_MATHFN (BUILT_IN_YN)
1809 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1810 return fn_arr[fcode];
1811 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1812 return fn_arr[fcodef];
1813 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1814 return fn_arr[fcodel];
1819 /* Like mathfn_built_in_1(), but always use the implicit array. */
1822 mathfn_built_in (tree type, enum built_in_function fn)
1824 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1827 /* If errno must be maintained, expand the RTL to check if the result,
1828 TARGET, of a built-in function call, EXP, is NaN, and if so set
1832 expand_errno_check (tree exp, rtx target)
1834 rtx lab = gen_label_rtx ();
1836 /* Test the result; if it is NaN, set errno=EDOM because
1837 the argument was not in the domain. */
1838 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1842 /* If this built-in doesn't throw an exception, set errno directly. */
1843 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1845 #ifdef GEN_ERRNO_RTX
1846 rtx errno_rtx = GEN_ERRNO_RTX;
1849 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1851 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1857 /* Make sure the library call isn't expanded as a tail call. */
1858 CALL_EXPR_TAILCALL (exp) = 0;
1860 /* We can't set errno=EDOM directly; let the library call do it.
1861 Pop the arguments right away in case the call gets deleted. */
1863 expand_call (exp, target, 0);
1868 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1869 Return NULL_RTX if a normal call should be emitted rather than expanding
1870 the function in-line. EXP is the expression that is a call to the builtin
1871 function; if convenient, the result should be placed in TARGET.
1872 SUBTARGET may be used as the target for computing one of EXP's operands. */
1875 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1877 optab builtin_optab;
1878 rtx op0, insns, before_call;
1879 tree fndecl = get_callee_fndecl (exp);
1880 enum machine_mode mode;
1881 bool errno_set = false;
1884 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1887 arg = CALL_EXPR_ARG (exp, 0);
1889 switch (DECL_FUNCTION_CODE (fndecl))
1891 CASE_FLT_FN (BUILT_IN_SQRT):
1892 errno_set = ! tree_expr_nonnegative_p (arg);
1893 builtin_optab = sqrt_optab;
1895 CASE_FLT_FN (BUILT_IN_EXP):
1896 errno_set = true; builtin_optab = exp_optab; break;
1897 CASE_FLT_FN (BUILT_IN_EXP10):
1898 CASE_FLT_FN (BUILT_IN_POW10):
1899 errno_set = true; builtin_optab = exp10_optab; break;
1900 CASE_FLT_FN (BUILT_IN_EXP2):
1901 errno_set = true; builtin_optab = exp2_optab; break;
1902 CASE_FLT_FN (BUILT_IN_EXPM1):
1903 errno_set = true; builtin_optab = expm1_optab; break;
1904 CASE_FLT_FN (BUILT_IN_LOGB):
1905 errno_set = true; builtin_optab = logb_optab; break;
1906 CASE_FLT_FN (BUILT_IN_LOG):
1907 errno_set = true; builtin_optab = log_optab; break;
1908 CASE_FLT_FN (BUILT_IN_LOG10):
1909 errno_set = true; builtin_optab = log10_optab; break;
1910 CASE_FLT_FN (BUILT_IN_LOG2):
1911 errno_set = true; builtin_optab = log2_optab; break;
1912 CASE_FLT_FN (BUILT_IN_LOG1P):
1913 errno_set = true; builtin_optab = log1p_optab; break;
1914 CASE_FLT_FN (BUILT_IN_ASIN):
1915 builtin_optab = asin_optab; break;
1916 CASE_FLT_FN (BUILT_IN_ACOS):
1917 builtin_optab = acos_optab; break;
1918 CASE_FLT_FN (BUILT_IN_TAN):
1919 builtin_optab = tan_optab; break;
1920 CASE_FLT_FN (BUILT_IN_ATAN):
1921 builtin_optab = atan_optab; break;
1922 CASE_FLT_FN (BUILT_IN_FLOOR):
1923 builtin_optab = floor_optab; break;
1924 CASE_FLT_FN (BUILT_IN_CEIL):
1925 builtin_optab = ceil_optab; break;
1926 CASE_FLT_FN (BUILT_IN_TRUNC):
1927 builtin_optab = btrunc_optab; break;
1928 CASE_FLT_FN (BUILT_IN_ROUND):
1929 builtin_optab = round_optab; break;
1930 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1931 builtin_optab = nearbyint_optab;
1932 if (flag_trapping_math)
1934 /* Else fallthrough and expand as rint. */
1935 CASE_FLT_FN (BUILT_IN_RINT):
1936 builtin_optab = rint_optab; break;
1941 /* Make a suitable register to place result in. */
1942 mode = TYPE_MODE (TREE_TYPE (exp));
1944 if (! flag_errno_math || ! HONOR_NANS (mode))
1947 /* Before working hard, check whether the instruction is available. */
1948 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1950 target = gen_reg_rtx (mode);
1952 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1953 need to expand the argument again. This way, we will not perform
1954 side-effects more the once. */
1955 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1957 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1961 /* Compute into TARGET.
1962 Set TARGET to wherever the result comes back. */
1963 target = expand_unop (mode, builtin_optab, op0, target, 0);
1968 expand_errno_check (exp, target);
1970 /* Output the entire sequence. */
1971 insns = get_insns ();
1977 /* If we were unable to expand via the builtin, stop the sequence
1978 (without outputting the insns) and call to the library function
1979 with the stabilized argument list. */
1983 before_call = get_last_insn ();
1985 return expand_call (exp, target, target == const0_rtx);
1988 /* Expand a call to the builtin binary math functions (pow and atan2).
1989 Return NULL_RTX if a normal call should be emitted rather than expanding the
1990 function in-line. EXP is the expression that is a call to the builtin
1991 function; if convenient, the result should be placed in TARGET.
1992 SUBTARGET may be used as the target for computing one of EXP's
1996 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1998 optab builtin_optab;
1999 rtx op0, op1, insns;
2000 int op1_type = REAL_TYPE;
2001 tree fndecl = get_callee_fndecl (exp);
2003 enum machine_mode mode;
2004 bool errno_set = true;
2006 switch (DECL_FUNCTION_CODE (fndecl))
2008 CASE_FLT_FN (BUILT_IN_SCALBN):
2009 CASE_FLT_FN (BUILT_IN_SCALBLN):
2010 CASE_FLT_FN (BUILT_IN_LDEXP):
2011 op1_type = INTEGER_TYPE;
2016 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2019 arg0 = CALL_EXPR_ARG (exp, 0);
2020 arg1 = CALL_EXPR_ARG (exp, 1);
2022 switch (DECL_FUNCTION_CODE (fndecl))
2024 CASE_FLT_FN (BUILT_IN_POW):
2025 builtin_optab = pow_optab; break;
2026 CASE_FLT_FN (BUILT_IN_ATAN2):
2027 builtin_optab = atan2_optab; break;
2028 CASE_FLT_FN (BUILT_IN_SCALB):
2029 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2031 builtin_optab = scalb_optab; break;
2032 CASE_FLT_FN (BUILT_IN_SCALBN):
2033 CASE_FLT_FN (BUILT_IN_SCALBLN):
2034 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2036 /* Fall through... */
2037 CASE_FLT_FN (BUILT_IN_LDEXP):
2038 builtin_optab = ldexp_optab; break;
2039 CASE_FLT_FN (BUILT_IN_FMOD):
2040 builtin_optab = fmod_optab; break;
2041 CASE_FLT_FN (BUILT_IN_REMAINDER):
2042 CASE_FLT_FN (BUILT_IN_DREM):
2043 builtin_optab = remainder_optab; break;
2048 /* Make a suitable register to place result in. */
2049 mode = TYPE_MODE (TREE_TYPE (exp));
2051 /* Before working hard, check whether the instruction is available. */
2052 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2055 target = gen_reg_rtx (mode);
2057 if (! flag_errno_math || ! HONOR_NANS (mode))
2060 /* Always stabilize the argument list. */
2061 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2062 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2064 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2065 op1 = expand_normal (arg1);
2069 /* Compute into TARGET.
2070 Set TARGET to wherever the result comes back. */
2071 target = expand_binop (mode, builtin_optab, op0, op1,
2072 target, 0, OPTAB_DIRECT);
2074 /* If we were unable to expand via the builtin, stop the sequence
2075 (without outputting the insns) and call to the library function
2076 with the stabilized argument list. */
2080 return expand_call (exp, target, target == const0_rtx);
2084 expand_errno_check (exp, target);
2086 /* Output the entire sequence. */
2087 insns = get_insns ();
2094 /* Expand a call to the builtin sin and cos math functions.
2095 Return NULL_RTX if a normal call should be emitted rather than expanding the
2096 function in-line. EXP is the expression that is a call to the builtin
2097 function; if convenient, the result should be placed in TARGET.
2098 SUBTARGET may be used as the target for computing one of EXP's
2102 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2104 optab builtin_optab;
2106 tree fndecl = get_callee_fndecl (exp);
2107 enum machine_mode mode;
2110 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2113 arg = CALL_EXPR_ARG (exp, 0);
2115 switch (DECL_FUNCTION_CODE (fndecl))
2117 CASE_FLT_FN (BUILT_IN_SIN):
2118 CASE_FLT_FN (BUILT_IN_COS):
2119 builtin_optab = sincos_optab; break;
2124 /* Make a suitable register to place result in. */
2125 mode = TYPE_MODE (TREE_TYPE (exp));
2127 /* Check if sincos insn is available, otherwise fallback
2128 to sin or cos insn. */
2129 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2130 switch (DECL_FUNCTION_CODE (fndecl))
2132 CASE_FLT_FN (BUILT_IN_SIN):
2133 builtin_optab = sin_optab; break;
2134 CASE_FLT_FN (BUILT_IN_COS):
2135 builtin_optab = cos_optab; break;
2140 /* Before working hard, check whether the instruction is available. */
2141 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2143 target = gen_reg_rtx (mode);
2145 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2146 need to expand the argument again. This way, we will not perform
2147 side-effects more the once. */
2148 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2150 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2154 /* Compute into TARGET.
2155 Set TARGET to wherever the result comes back. */
2156 if (builtin_optab == sincos_optab)
2160 switch (DECL_FUNCTION_CODE (fndecl))
2162 CASE_FLT_FN (BUILT_IN_SIN):
2163 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2165 CASE_FLT_FN (BUILT_IN_COS):
2166 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2171 gcc_assert (result);
2175 target = expand_unop (mode, builtin_optab, op0, target, 0);
2180 /* Output the entire sequence. */
2181 insns = get_insns ();
2187 /* If we were unable to expand via the builtin, stop the sequence
2188 (without outputting the insns) and call to the library function
2189 with the stabilized argument list. */
2193 target = expand_call (exp, target, target == const0_rtx);
2198 /* Expand a call to one of the builtin math functions that operate on
2199 floating point argument and output an integer result (ilogb, isinf,
2201 Return 0 if a normal call should be emitted rather than expanding the
2202 function in-line. EXP is the expression that is a call to the builtin
2203 function; if convenient, the result should be placed in TARGET.
2204 SUBTARGET may be used as the target for computing one of EXP's operands. */
2207 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2209 optab builtin_optab = 0;
2210 enum insn_code icode = CODE_FOR_nothing;
2212 tree fndecl = get_callee_fndecl (exp);
2213 enum machine_mode mode;
2214 bool errno_set = false;
2217 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2220 arg = CALL_EXPR_ARG (exp, 0);
2222 switch (DECL_FUNCTION_CODE (fndecl))
2224 CASE_FLT_FN (BUILT_IN_ILOGB):
2225 errno_set = true; builtin_optab = ilogb_optab; break;
2226 CASE_FLT_FN (BUILT_IN_ISINF):
2227 builtin_optab = isinf_optab; break;
2228 case BUILT_IN_ISNORMAL:
2229 case BUILT_IN_ISFINITE:
2230 CASE_FLT_FN (BUILT_IN_FINITE):
2231 /* These builtins have no optabs (yet). */
2237 /* There's no easy way to detect the case we need to set EDOM. */
2238 if (flag_errno_math && errno_set)
2241 /* Optab mode depends on the mode of the input argument. */
2242 mode = TYPE_MODE (TREE_TYPE (arg));
2245 icode = optab_handler (builtin_optab, mode)->insn_code;
2247 /* Before working hard, check whether the instruction is available. */
2248 if (icode != CODE_FOR_nothing)
2250 /* Make a suitable register to place result in. */
2252 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2253 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2255 gcc_assert (insn_data[icode].operand[0].predicate
2256 (target, GET_MODE (target)));
2258 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2259 need to expand the argument again. This way, we will not perform
2260 side-effects more the once. */
2261 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2263 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2265 if (mode != GET_MODE (op0))
2266 op0 = convert_to_mode (mode, op0, 0);
2268 /* Compute into TARGET.
2269 Set TARGET to wherever the result comes back. */
2270 emit_unop_insn (icode, target, op0, UNKNOWN);
2274 /* If there is no optab, try generic code. */
2275 switch (DECL_FUNCTION_CODE (fndecl))
2279 CASE_FLT_FN (BUILT_IN_ISINF):
2281 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2282 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2283 tree const type = TREE_TYPE (arg);
2287 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2288 real_from_string (&r, buf);
2289 result = build_call_expr (isgr_fn, 2,
2290 fold_build1 (ABS_EXPR, type, arg),
2291 build_real (type, r));
2292 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2294 CASE_FLT_FN (BUILT_IN_FINITE):
2295 case BUILT_IN_ISFINITE:
2297 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2298 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2299 tree const type = TREE_TYPE (arg);
2303 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2304 real_from_string (&r, buf);
2305 result = build_call_expr (isle_fn, 2,
2306 fold_build1 (ABS_EXPR, type, arg),
2307 build_real (type, r));
2308 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2310 case BUILT_IN_ISNORMAL:
2312 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2313 islessequal(fabs(x),DBL_MAX). */
2314 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2315 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2316 tree const type = TREE_TYPE (arg);
2317 REAL_VALUE_TYPE rmax, rmin;
2320 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2321 real_from_string (&rmax, buf);
2322 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2323 real_from_string (&rmin, buf);
2324 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2325 result = build_call_expr (isle_fn, 2, arg,
2326 build_real (type, rmax));
2327 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2328 build_call_expr (isge_fn, 2, arg,
2329 build_real (type, rmin)));
2330 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2336 target = expand_call (exp, target, target == const0_rtx);
2341 /* Expand a call to the builtin sincos math function.
2342 Return NULL_RTX if a normal call should be emitted rather than expanding the
2343 function in-line. EXP is the expression that is a call to the builtin
2347 expand_builtin_sincos (tree exp)
2349 rtx op0, op1, op2, target1, target2;
2350 enum machine_mode mode;
2351 tree arg, sinp, cosp;
2354 if (!validate_arglist (exp, REAL_TYPE,
2355 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2358 arg = CALL_EXPR_ARG (exp, 0);
2359 sinp = CALL_EXPR_ARG (exp, 1);
2360 cosp = CALL_EXPR_ARG (exp, 2);
2362 /* Make a suitable register to place result in. */
2363 mode = TYPE_MODE (TREE_TYPE (arg));
2365 /* Check if sincos insn is available, otherwise emit the call. */
2366 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2369 target1 = gen_reg_rtx (mode);
2370 target2 = gen_reg_rtx (mode);
2372 op0 = expand_normal (arg);
2373 op1 = expand_normal (build_fold_indirect_ref (sinp));
2374 op2 = expand_normal (build_fold_indirect_ref (cosp));
2376 /* Compute into target1 and target2.
2377 Set TARGET to wherever the result comes back. */
2378 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2379 gcc_assert (result);
2381 /* Move target1 and target2 to the memory locations indicated
2383 emit_move_insn (op1, target1);
2384 emit_move_insn (op2, target2);
2389 /* Expand a call to the internal cexpi builtin to the sincos math function.
2390 EXP is the expression that is a call to the builtin function; if convenient,
2391 the result should be placed in TARGET. SUBTARGET may be used as the target
2392 for computing one of EXP's operands. */
2395 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2397 tree fndecl = get_callee_fndecl (exp);
2399 enum machine_mode mode;
2402 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2405 arg = CALL_EXPR_ARG (exp, 0);
2406 type = TREE_TYPE (arg);
2407 mode = TYPE_MODE (TREE_TYPE (arg));
2409 /* Try expanding via a sincos optab, fall back to emitting a libcall
2410 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2411 is only generated from sincos, cexp or if we have either of them. */
2412 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2414 op1 = gen_reg_rtx (mode);
2415 op2 = gen_reg_rtx (mode);
2417 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2419 /* Compute into op1 and op2. */
2420 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2422 else if (TARGET_HAS_SINCOS)
2424 tree call, fn = NULL_TREE;
2428 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2429 fn = built_in_decls[BUILT_IN_SINCOSF];
2430 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2431 fn = built_in_decls[BUILT_IN_SINCOS];
2432 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2433 fn = built_in_decls[BUILT_IN_SINCOSL];
2437 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2438 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2439 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2440 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2441 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2442 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2444 /* Make sure not to fold the sincos call again. */
2445 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2446 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2447 call, 3, arg, top1, top2));
2451 tree call, fn = NULL_TREE, narg;
2452 tree ctype = build_complex_type (type);
2454 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2455 fn = built_in_decls[BUILT_IN_CEXPF];
2456 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2457 fn = built_in_decls[BUILT_IN_CEXP];
2458 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2459 fn = built_in_decls[BUILT_IN_CEXPL];
2463 /* If we don't have a decl for cexp create one. This is the
2464 friendliest fallback if the user calls __builtin_cexpi
2465 without full target C99 function support. */
2466 if (fn == NULL_TREE)
2469 const char *name = NULL;
2471 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2473 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2475 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2478 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2479 fn = build_fn_decl (name, fntype);
2482 narg = fold_build2 (COMPLEX_EXPR, ctype,
2483 build_real (type, dconst0), arg);
2485 /* Make sure not to fold the cexp call again. */
2486 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2487 return expand_expr (build_call_nary (ctype, call, 1, narg),
2488 target, VOIDmode, EXPAND_NORMAL);
2491 /* Now build the proper return type. */
2492 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2493 make_tree (TREE_TYPE (arg), op2),
2494 make_tree (TREE_TYPE (arg), op1)),
2495 target, VOIDmode, EXPAND_NORMAL);
2498 /* Expand a call to one of the builtin rounding functions gcc defines
2499 as an extension (lfloor and lceil). As these are gcc extensions we
2500 do not need to worry about setting errno to EDOM.
2501 If expanding via optab fails, lower expression to (int)(floor(x)).
2502 EXP is the expression that is a call to the builtin function;
2503 if convenient, the result should be placed in TARGET. */
2506 expand_builtin_int_roundingfn (tree exp, rtx target)
2508 convert_optab builtin_optab;
2509 rtx op0, insns, tmp;
2510 tree fndecl = get_callee_fndecl (exp);
2511 enum built_in_function fallback_fn;
2512 tree fallback_fndecl;
2513 enum machine_mode mode;
2516 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2519 arg = CALL_EXPR_ARG (exp, 0);
2521 switch (DECL_FUNCTION_CODE (fndecl))
2523 CASE_FLT_FN (BUILT_IN_LCEIL):
2524 CASE_FLT_FN (BUILT_IN_LLCEIL):
2525 builtin_optab = lceil_optab;
2526 fallback_fn = BUILT_IN_CEIL;
2529 CASE_FLT_FN (BUILT_IN_LFLOOR):
2530 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2531 builtin_optab = lfloor_optab;
2532 fallback_fn = BUILT_IN_FLOOR;
2539 /* Make a suitable register to place result in. */
2540 mode = TYPE_MODE (TREE_TYPE (exp));
2542 target = gen_reg_rtx (mode);
2544 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2545 need to expand the argument again. This way, we will not perform
2546 side-effects more the once. */
2547 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2549 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2553 /* Compute into TARGET. */
2554 if (expand_sfix_optab (target, op0, builtin_optab))
2556 /* Output the entire sequence. */
2557 insns = get_insns ();
2563 /* If we were unable to expand via the builtin, stop the sequence
2564 (without outputting the insns). */
2567 /* Fall back to floating point rounding optab. */
2568 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2570 /* For non-C99 targets we may end up without a fallback fndecl here
2571 if the user called __builtin_lfloor directly. In this case emit
2572 a call to the floor/ceil variants nevertheless. This should result
2573 in the best user experience for not full C99 targets. */
2574 if (fallback_fndecl == NULL_TREE)
2577 const char *name = NULL;
2579 switch (DECL_FUNCTION_CODE (fndecl))
2581 case BUILT_IN_LCEIL:
2582 case BUILT_IN_LLCEIL:
2585 case BUILT_IN_LCEILF:
2586 case BUILT_IN_LLCEILF:
2589 case BUILT_IN_LCEILL:
2590 case BUILT_IN_LLCEILL:
2593 case BUILT_IN_LFLOOR:
2594 case BUILT_IN_LLFLOOR:
2597 case BUILT_IN_LFLOORF:
2598 case BUILT_IN_LLFLOORF:
2601 case BUILT_IN_LFLOORL:
2602 case BUILT_IN_LLFLOORL:
2609 fntype = build_function_type_list (TREE_TYPE (arg),
2610 TREE_TYPE (arg), NULL_TREE);
2611 fallback_fndecl = build_fn_decl (name, fntype);
2614 exp = build_call_expr (fallback_fndecl, 1, arg);
2616 tmp = expand_normal (exp);
2618 /* Truncate the result of floating point optab to integer
2619 via expand_fix (). */
2620 target = gen_reg_rtx (mode);
2621 expand_fix (target, tmp, 0);
2626 /* Expand a call to one of the builtin math functions doing integer
2628 Return 0 if a normal call should be emitted rather than expanding the
2629 function in-line. EXP is the expression that is a call to the builtin
2630 function; if convenient, the result should be placed in TARGET. */
2633 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2635 convert_optab builtin_optab;
2637 tree fndecl = get_callee_fndecl (exp);
2639 enum machine_mode mode;
2641 /* There's no easy way to detect the case we need to set EDOM. */
2642 if (flag_errno_math)
2645 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2648 arg = CALL_EXPR_ARG (exp, 0);
2650 switch (DECL_FUNCTION_CODE (fndecl))
2652 CASE_FLT_FN (BUILT_IN_LRINT):
2653 CASE_FLT_FN (BUILT_IN_LLRINT):
2654 builtin_optab = lrint_optab; break;
2655 CASE_FLT_FN (BUILT_IN_LROUND):
2656 CASE_FLT_FN (BUILT_IN_LLROUND):
2657 builtin_optab = lround_optab; break;
2662 /* Make a suitable register to place result in. */
2663 mode = TYPE_MODE (TREE_TYPE (exp));
2665 target = gen_reg_rtx (mode);
2667 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2668 need to expand the argument again. This way, we will not perform
2669 side-effects more the once. */
2670 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2672 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2676 if (expand_sfix_optab (target, op0, builtin_optab))
2678 /* Output the entire sequence. */
2679 insns = get_insns ();
2685 /* If we were unable to expand via the builtin, stop the sequence
2686 (without outputting the insns) and call to the library function
2687 with the stabilized argument list. */
2690 target = expand_call (exp, target, target == const0_rtx);
2695 /* To evaluate powi(x,n), the floating point value x raised to the
2696 constant integer exponent n, we use a hybrid algorithm that
2697 combines the "window method" with look-up tables. For an
2698 introduction to exponentiation algorithms and "addition chains",
2699 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2700 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2701 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2702 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2704 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2705 multiplications to inline before calling the system library's pow
2706 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2707 so this default never requires calling pow, powf or powl. */
2709 #ifndef POWI_MAX_MULTS
2710 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2713 /* The size of the "optimal power tree" lookup table. All
2714 exponents less than this value are simply looked up in the
2715 powi_table below. This threshold is also used to size the
2716 cache of pseudo registers that hold intermediate results. */
2717 #define POWI_TABLE_SIZE 256
2719 /* The size, in bits of the window, used in the "window method"
2720 exponentiation algorithm. This is equivalent to a radix of
2721 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2722 #define POWI_WINDOW_SIZE 3
2724 /* The following table is an efficient representation of an
2725 "optimal power tree". For each value, i, the corresponding
2726 value, j, in the table states than an optimal evaluation
2727 sequence for calculating pow(x,i) can be found by evaluating
2728 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2729 100 integers is given in Knuth's "Seminumerical algorithms". */
2731 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2733 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2734 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2735 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2736 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2737 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2738 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2739 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2740 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2741 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2742 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2743 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2744 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2745 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2746 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2747 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2748 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2749 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2750 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2751 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2752 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2753 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2754 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2755 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2756 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2757 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2758 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2759 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2760 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2761 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2762 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2763 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2764 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2768 /* Return the number of multiplications required to calculate
2769 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2770 subroutine of powi_cost. CACHE is an array indicating
2771 which exponents have already been calculated. */
2774 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2776 /* If we've already calculated this exponent, then this evaluation
2777 doesn't require any additional multiplications. */
2782 return powi_lookup_cost (n - powi_table[n], cache)
2783 + powi_lookup_cost (powi_table[n], cache) + 1;
2786 /* Return the number of multiplications required to calculate
2787 powi(x,n) for an arbitrary x, given the exponent N. This
2788 function needs to be kept in sync with expand_powi below. */
2791 powi_cost (HOST_WIDE_INT n)
2793 bool cache[POWI_TABLE_SIZE];
2794 unsigned HOST_WIDE_INT digit;
2795 unsigned HOST_WIDE_INT val;
2801 /* Ignore the reciprocal when calculating the cost. */
2802 val = (n < 0) ? -n : n;
2804 /* Initialize the exponent cache. */
2805 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2810 while (val >= POWI_TABLE_SIZE)
2814 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2815 result += powi_lookup_cost (digit, cache)
2816 + POWI_WINDOW_SIZE + 1;
2817 val >>= POWI_WINDOW_SIZE;
2826 return result + powi_lookup_cost (val, cache);
2829 /* Recursive subroutine of expand_powi. This function takes the array,
2830 CACHE, of already calculated exponents and an exponent N and returns
2831 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2834 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2836 unsigned HOST_WIDE_INT digit;
2840 if (n < POWI_TABLE_SIZE)
2845 target = gen_reg_rtx (mode);
2848 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2849 op1 = expand_powi_1 (mode, powi_table[n], cache);
2853 target = gen_reg_rtx (mode);
2854 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2855 op0 = expand_powi_1 (mode, n - digit, cache);
2856 op1 = expand_powi_1 (mode, digit, cache);
2860 target = gen_reg_rtx (mode);
2861 op0 = expand_powi_1 (mode, n >> 1, cache);
2865 result = expand_mult (mode, op0, op1, target, 0);
2866 if (result != target)
2867 emit_move_insn (target, result);
2871 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2872 floating point operand in mode MODE, and N is the exponent. This
2873 function needs to be kept in sync with powi_cost above. */
2876 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2878 unsigned HOST_WIDE_INT val;
2879 rtx cache[POWI_TABLE_SIZE];
2883 return CONST1_RTX (mode);
2885 val = (n < 0) ? -n : n;
2887 memset (cache, 0, sizeof (cache));
2890 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2892 /* If the original exponent was negative, reciprocate the result. */
2894 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2895 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2900 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2901 a normal call should be emitted rather than expanding the function
2902 in-line. EXP is the expression that is a call to the builtin
2903 function; if convenient, the result should be placed in TARGET. */
2906 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2910 tree type = TREE_TYPE (exp);
2911 REAL_VALUE_TYPE cint, c, c2;
2914 enum machine_mode mode = TYPE_MODE (type);
2916 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2919 arg0 = CALL_EXPR_ARG (exp, 0);
2920 arg1 = CALL_EXPR_ARG (exp, 1);
2922 if (TREE_CODE (arg1) != REAL_CST
2923 || TREE_OVERFLOW (arg1))
2924 return expand_builtin_mathfn_2 (exp, target, subtarget);
2926 /* Handle constant exponents. */
2928 /* For integer valued exponents we can expand to an optimal multiplication
2929 sequence using expand_powi. */
2930 c = TREE_REAL_CST (arg1);
2931 n = real_to_integer (&c);
2932 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2933 if (real_identical (&c, &cint)
2934 && ((n >= -1 && n <= 2)
2935 || (flag_unsafe_math_optimizations
2936 && optimize_insn_for_speed_p ()
2937 && powi_cost (n) <= POWI_MAX_MULTS)))
2939 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2942 op = force_reg (mode, op);
2943 op = expand_powi (op, mode, n);
2948 narg0 = builtin_save_expr (arg0);
2950 /* If the exponent is not integer valued, check if it is half of an integer.
2951 In this case we can expand to sqrt (x) * x**(n/2). */
2952 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2953 if (fn != NULL_TREE)
2955 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2956 n = real_to_integer (&c2);
2957 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2958 if (real_identical (&c2, &cint)
2959 && ((flag_unsafe_math_optimizations
2960 && optimize_insn_for_speed_p ()
2961 && powi_cost (n/2) <= POWI_MAX_MULTS)
2964 tree call_expr = build_call_expr (fn, 1, narg0);
2965 /* Use expand_expr in case the newly built call expression
2966 was folded to a non-call. */
2967 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2970 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2971 op2 = force_reg (mode, op2);
2972 op2 = expand_powi (op2, mode, abs (n / 2));
2973 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2974 0, OPTAB_LIB_WIDEN);
2975 /* If the original exponent was negative, reciprocate the
2978 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2979 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2985 /* Try if the exponent is a third of an integer. In this case
2986 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2987 different from pow (x, 1./3.) due to rounding and behavior
2988 with negative x we need to constrain this transformation to
2989 unsafe math and positive x or finite math. */
2990 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2992 && flag_unsafe_math_optimizations
2993 && (tree_expr_nonnegative_p (arg0)
2994 || !HONOR_NANS (mode)))
2996 REAL_VALUE_TYPE dconst3;
2997 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
2998 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2999 real_round (&c2, mode, &c2);
3000 n = real_to_integer (&c2);
3001 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3002 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3003 real_convert (&c2, mode, &c2);
3004 if (real_identical (&c2, &c)
3005 && ((optimize_insn_for_speed_p ()
3006 && powi_cost (n/3) <= POWI_MAX_MULTS)
3009 tree call_expr = build_call_expr (fn, 1,narg0);
3010 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3011 if (abs (n) % 3 == 2)
3012 op = expand_simple_binop (mode, MULT, op, op, op,
3013 0, OPTAB_LIB_WIDEN);
3016 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3017 op2 = force_reg (mode, op2);
3018 op2 = expand_powi (op2, mode, abs (n / 3));
3019 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3020 0, OPTAB_LIB_WIDEN);
3021 /* If the original exponent was negative, reciprocate the
3024 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3025 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3031 /* Fall back to optab expansion. */
3032 return expand_builtin_mathfn_2 (exp, target, subtarget);
3035 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3036 a normal call should be emitted rather than expanding the function
3037 in-line. EXP is the expression that is a call to the builtin
3038 function; if convenient, the result should be placed in TARGET. */
3041 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3045 enum machine_mode mode;
3046 enum machine_mode mode2;
3048 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3051 arg0 = CALL_EXPR_ARG (exp, 0);
3052 arg1 = CALL_EXPR_ARG (exp, 1);
3053 mode = TYPE_MODE (TREE_TYPE (exp));
3055 /* Handle constant power. */
3057 if (TREE_CODE (arg1) == INTEGER_CST
3058 && !TREE_OVERFLOW (arg1))
3060 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3062 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3063 Otherwise, check the number of multiplications required. */
3064 if ((TREE_INT_CST_HIGH (arg1) == 0
3065 || TREE_INT_CST_HIGH (arg1) == -1)
3066 && ((n >= -1 && n <= 2)
3067 || (optimize_insn_for_speed_p ()
3068 && powi_cost (n) <= POWI_MAX_MULTS)))
3070 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3071 op0 = force_reg (mode, op0);
3072 return expand_powi (op0, mode, n);
3076 /* Emit a libcall to libgcc. */
3078 /* Mode of the 2nd argument must match that of an int. */
3079 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3081 if (target == NULL_RTX)
3082 target = gen_reg_rtx (mode);
3084 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3085 if (GET_MODE (op0) != mode)
3086 op0 = convert_to_mode (mode, op0, 0);
3087 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3088 if (GET_MODE (op1) != mode2)
3089 op1 = convert_to_mode (mode2, op1, 0);
3091 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3092 target, LCT_CONST, mode, 2,
3093 op0, mode, op1, mode2);
3098 /* Expand expression EXP which is a call to the strlen builtin. Return
3099 NULL_RTX if we failed the caller should emit a normal call, otherwise
3100 try to get the result in TARGET, if convenient. */
3103 expand_builtin_strlen (tree exp, rtx target,
3104 enum machine_mode target_mode)
3106 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3112 tree src = CALL_EXPR_ARG (exp, 0);
3113 rtx result, src_reg, char_rtx, before_strlen;
3114 enum machine_mode insn_mode = target_mode, char_mode;
3115 enum insn_code icode = CODE_FOR_nothing;
3118 /* If the length can be computed at compile-time, return it. */
3119 len = c_strlen (src, 0);
3121 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3123 /* If the length can be computed at compile-time and is constant
3124 integer, but there are side-effects in src, evaluate
3125 src for side-effects, then return len.
3126 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3127 can be optimized into: i++; x = 3; */
3128 len = c_strlen (src, 1);
3129 if (len && TREE_CODE (len) == INTEGER_CST)
3131 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3132 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3135 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3137 /* If SRC is not a pointer type, don't do this operation inline. */
3141 /* Bail out if we can't compute strlen in the right mode. */
3142 while (insn_mode != VOIDmode)
3144 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3145 if (icode != CODE_FOR_nothing)
3148 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3150 if (insn_mode == VOIDmode)
3153 /* Make a place to write the result of the instruction. */
3157 && GET_MODE (result) == insn_mode
3158 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3159 result = gen_reg_rtx (insn_mode);
3161 /* Make a place to hold the source address. We will not expand
3162 the actual source until we are sure that the expansion will
3163 not fail -- there are trees that cannot be expanded twice. */
3164 src_reg = gen_reg_rtx (Pmode);
3166 /* Mark the beginning of the strlen sequence so we can emit the
3167 source operand later. */
3168 before_strlen = get_last_insn ();
3170 char_rtx = const0_rtx;
3171 char_mode = insn_data[(int) icode].operand[2].mode;
3172 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3174 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3176 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3177 char_rtx, GEN_INT (align));
3182 /* Now that we are assured of success, expand the source. */
3184 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3186 emit_move_insn (src_reg, pat);
3191 emit_insn_after (pat, before_strlen);
3193 emit_insn_before (pat, get_insns ());
3195 /* Return the value in the proper mode for this function. */
3196 if (GET_MODE (result) == target_mode)
3198 else if (target != 0)
3199 convert_move (target, result, 0);
3201 target = convert_to_mode (target_mode, result, 0);
3207 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3208 caller should emit a normal call, otherwise try to get the result
3209 in TARGET, if convenient (and in mode MODE if that's convenient). */
3212 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3214 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3216 tree type = TREE_TYPE (exp);
3217 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3218 CALL_EXPR_ARG (exp, 1), type);
3220 return expand_expr (result, target, mode, EXPAND_NORMAL);
3225 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3226 caller should emit a normal call, otherwise try to get the result
3227 in TARGET, if convenient (and in mode MODE if that's convenient). */
3230 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3232 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3234 tree type = TREE_TYPE (exp);
3235 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3236 CALL_EXPR_ARG (exp, 1), type);
3238 return expand_expr (result, target, mode, EXPAND_NORMAL);
3240 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3245 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3246 caller should emit a normal call, otherwise try to get the result
3247 in TARGET, if convenient (and in mode MODE if that's convenient). */
3250 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3252 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3254 tree type = TREE_TYPE (exp);
3255 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3256 CALL_EXPR_ARG (exp, 1), type);
3258 return expand_expr (result, target, mode, EXPAND_NORMAL);
3263 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3264 caller should emit a normal call, otherwise try to get the result
3265 in TARGET, if convenient (and in mode MODE if that's convenient). */
3268 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3270 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3272 tree type = TREE_TYPE (exp);
3273 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3274 CALL_EXPR_ARG (exp, 1), type);
3276 return expand_expr (result, target, mode, EXPAND_NORMAL);
3281 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3282 bytes from constant string DATA + OFFSET and return it as target
3286 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3287 enum machine_mode mode)
3289 const char *str = (const char *) data;
3291 gcc_assert (offset >= 0
3292 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3293 <= strlen (str) + 1));
3295 return c_readstr (str + offset, mode);
3298 /* Expand a call EXP to the memcpy builtin.
3299 Return NULL_RTX if we failed, the caller should emit a normal call,
3300 otherwise try to get the result in TARGET, if convenient (and in
3301 mode MODE if that's convenient). */
3304 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3306 tree fndecl = get_callee_fndecl (exp);
3308 if (!validate_arglist (exp,
3309 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3313 tree dest = CALL_EXPR_ARG (exp, 0);
3314 tree src = CALL_EXPR_ARG (exp, 1);
3315 tree len = CALL_EXPR_ARG (exp, 2);
3316 const char *src_str;
3317 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3318 unsigned int dest_align
3319 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3320 rtx dest_mem, src_mem, dest_addr, len_rtx;
3321 tree result = fold_builtin_memory_op (dest, src, len,
3322 TREE_TYPE (TREE_TYPE (fndecl)),
3324 HOST_WIDE_INT expected_size = -1;
3325 unsigned int expected_align = 0;
3326 tree_ann_common_t ann;
3330 while (TREE_CODE (result) == COMPOUND_EXPR)
3332 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3334 result = TREE_OPERAND (result, 1);
3336 return expand_expr (result, target, mode, EXPAND_NORMAL);
3339 /* If DEST is not a pointer type, call the normal function. */
3340 if (dest_align == 0)
3343 /* If either SRC is not a pointer type, don't do this
3344 operation in-line. */
3348 ann = tree_common_ann (exp);
3350 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3352 if (expected_align < dest_align)
3353 expected_align = dest_align;
3354 dest_mem = get_memory_rtx (dest, len);
3355 set_mem_align (dest_mem, dest_align);
3356 len_rtx = expand_normal (len);
3357 src_str = c_getstr (src);
3359 /* If SRC is a string constant and block move would be done
3360 by pieces, we can avoid loading the string from memory
3361 and only stored the computed constants. */
3363 && GET_CODE (len_rtx) == CONST_INT
3364 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3365 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3366 CONST_CAST (char *, src_str),
3369 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3370 builtin_memcpy_read_str,
3371 CONST_CAST (char *, src_str),
3372 dest_align, false, 0);
3373 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3374 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3378 src_mem = get_memory_rtx (src, len);
3379 set_mem_align (src_mem, src_align);
3381 /* Copy word part most expediently. */
3382 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3383 CALL_EXPR_TAILCALL (exp)
3384 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3385 expected_align, expected_size);
3389 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3390 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3396 /* Expand a call EXP to the mempcpy builtin.
3397 Return NULL_RTX if we failed; the caller should emit a normal call,
3398 otherwise try to get the result in TARGET, if convenient (and in
3399 mode MODE if that's convenient). If ENDP is 0 return the
3400 destination pointer, if ENDP is 1 return the end pointer ala
3401 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3405 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3407 if (!validate_arglist (exp,
3408 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3412 tree dest = CALL_EXPR_ARG (exp, 0);
3413 tree src = CALL_EXPR_ARG (exp, 1);
3414 tree len = CALL_EXPR_ARG (exp, 2);
3415 return expand_builtin_mempcpy_args (dest, src, len,
3417 target, mode, /*endp=*/ 1);
3421 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3422 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3423 so that this can also be called without constructing an actual CALL_EXPR.
3424 TYPE is the return type of the call. The other arguments and return value
3425 are the same as for expand_builtin_mempcpy. */
3428 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3429 rtx target, enum machine_mode mode, int endp)
3431 /* If return value is ignored, transform mempcpy into memcpy. */
3432 if (target == const0_rtx)
3434 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3439 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3440 target, mode, EXPAND_NORMAL);
3444 const char *src_str;
3445 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3446 unsigned int dest_align
3447 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3448 rtx dest_mem, src_mem, len_rtx;
3449 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3453 while (TREE_CODE (result) == COMPOUND_EXPR)
3455 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3457 result = TREE_OPERAND (result, 1);
3459 return expand_expr (result, target, mode, EXPAND_NORMAL);
3462 /* If either SRC or DEST is not a pointer type, don't do this
3463 operation in-line. */
3464 if (dest_align == 0 || src_align == 0)
3467 /* If LEN is not constant, call the normal function. */
3468 if (! host_integerp (len, 1))
3471 len_rtx = expand_normal (len);
3472 src_str = c_getstr (src);
3474 /* If SRC is a string constant and block move would be done
3475 by pieces, we can avoid loading the string from memory
3476 and only stored the computed constants. */
3478 && GET_CODE (len_rtx) == CONST_INT
3479 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3480 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3481 CONST_CAST (char *, src_str),
3484 dest_mem = get_memory_rtx (dest, len);
3485 set_mem_align (dest_mem, dest_align);
3486 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3487 builtin_memcpy_read_str,
3488 CONST_CAST (char *, src_str),
3489 dest_align, false, endp);
3490 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3491 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3495 if (GET_CODE (len_rtx) == CONST_INT
3496 && can_move_by_pieces (INTVAL (len_rtx),
3497 MIN (dest_align, src_align)))
3499 dest_mem = get_memory_rtx (dest, len);
3500 set_mem_align (dest_mem, dest_align);
3501 src_mem = get_memory_rtx (src, len);
3502 set_mem_align (src_mem, src_align);
3503 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3504 MIN (dest_align, src_align), endp);
3505 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3506 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3514 /* Expand expression EXP, which is a call to the memmove builtin. Return
3515 NULL_RTX if we failed; the caller should emit a normal call. */
3518 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3520 if (!validate_arglist (exp,
3521 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3525 tree dest = CALL_EXPR_ARG (exp, 0);
3526 tree src = CALL_EXPR_ARG (exp, 1);
3527 tree len = CALL_EXPR_ARG (exp, 2);
3528 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3529 target, mode, ignore);
3533 /* Helper function to do the actual work for expand_builtin_memmove. The
3534 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3535 so that this can also be called without constructing an actual CALL_EXPR.
3536 TYPE is the return type of the call. The other arguments and return value
3537 are the same as for expand_builtin_memmove. */
3540 expand_builtin_memmove_args (tree dest, tree src, tree len,
3541 tree type, rtx target, enum machine_mode mode,
3544 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3548 STRIP_TYPE_NOPS (result);
3549 while (TREE_CODE (result) == COMPOUND_EXPR)
3551 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3553 result = TREE_OPERAND (result, 1);
3555 return expand_expr (result, target, mode, EXPAND_NORMAL);
3558 /* Otherwise, call the normal function. */
3562 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3563 NULL_RTX if we failed the caller should emit a normal call. */
3566 expand_builtin_bcopy (tree exp, int ignore)
3568 tree type = TREE_TYPE (exp);
3569 tree src, dest, size;
3571 if (!validate_arglist (exp,
3572 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3575 src = CALL_EXPR_ARG (exp, 0);
3576 dest = CALL_EXPR_ARG (exp, 1);
3577 size = CALL_EXPR_ARG (exp, 2);
3579 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3580 This is done this way so that if it isn't expanded inline, we fall
3581 back to calling bcopy instead of memmove. */
3582 return expand_builtin_memmove_args (dest, src,
3583 fold_convert (sizetype, size),
3584 type, const0_rtx, VOIDmode,
3589 # define HAVE_movstr 0
3590 # define CODE_FOR_movstr CODE_FOR_nothing
3593 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3594 we failed, the caller should emit a normal call, otherwise try to
3595 get the result in TARGET, if convenient. If ENDP is 0 return the
3596 destination pointer, if ENDP is 1 return the end pointer ala
3597 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3601 expand_movstr (tree dest, tree src, rtx target, int endp)
3607 const struct insn_data * data;
3612 dest_mem = get_memory_rtx (dest, NULL);
3613 src_mem = get_memory_rtx (src, NULL);
3616 target = force_reg (Pmode, XEXP (dest_mem, 0));
3617 dest_mem = replace_equiv_address (dest_mem, target);
3618 end = gen_reg_rtx (Pmode);
3622 if (target == 0 || target == const0_rtx)
3624 end = gen_reg_rtx (Pmode);
3632 data = insn_data + CODE_FOR_movstr;
3634 if (data->operand[0].mode != VOIDmode)
3635 end = gen_lowpart (data->operand[0].mode, end);
3637 insn = data->genfun (end, dest_mem, src_mem);
3643 /* movstr is supposed to set end to the address of the NUL
3644 terminator. If the caller requested a mempcpy-like return value,
3646 if (endp == 1 && target != const0_rtx)
3648 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3649 emit_move_insn (target, force_operand (tem, NULL_RTX));
3655 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3656 NULL_RTX if we failed the caller should emit a normal call, otherwise
3657 try to get the result in TARGET, if convenient (and in mode MODE if that's
3661 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3663 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3665 tree dest = CALL_EXPR_ARG (exp, 0);
3666 tree src = CALL_EXPR_ARG (exp, 1);
3667 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3672 /* Helper function to do the actual work for expand_builtin_strcpy. The
3673 arguments to the builtin_strcpy call DEST and SRC are broken out
3674 so that this can also be called without constructing an actual CALL_EXPR.
3675 The other arguments and return value are the same as for
3676 expand_builtin_strcpy. */
3679 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3680 rtx target, enum machine_mode mode)
3682 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3684 return expand_expr (result, target, mode, EXPAND_NORMAL);
3685 return expand_movstr (dest, src, target, /*endp=*/0);
3689 /* Expand a call EXP to the stpcpy builtin.
3690 Return NULL_RTX if we failed the caller should emit a normal call,
3691 otherwise try to get the result in TARGET, if convenient (and in
3692 mode MODE if that's convenient). */
3695 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3699 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3702 dst = CALL_EXPR_ARG (exp, 0);
3703 src = CALL_EXPR_ARG (exp, 1);
3705 /* If return value is ignored, transform stpcpy into strcpy. */
3706 if (target == const0_rtx)
3708 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3712 return expand_expr (build_call_expr (fn, 2, dst, src),
3713 target, mode, EXPAND_NORMAL);
3720 /* Ensure we get an actual string whose length can be evaluated at
3721 compile-time, not an expression containing a string. This is
3722 because the latter will potentially produce pessimized code
3723 when used to produce the return value. */
3724 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3725 return expand_movstr (dst, src, target, /*endp=*/2);
3727 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3728 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3729 target, mode, /*endp=*/2);
3734 if (TREE_CODE (len) == INTEGER_CST)
3736 rtx len_rtx = expand_normal (len);
3738 if (GET_CODE (len_rtx) == CONST_INT)
3740 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3741 dst, src, target, mode);
3747 if (mode != VOIDmode)
3748 target = gen_reg_rtx (mode);
3750 target = gen_reg_rtx (GET_MODE (ret));
3752 if (GET_MODE (target) != GET_MODE (ret))
3753 ret = gen_lowpart (GET_MODE (target), ret);
3755 ret = plus_constant (ret, INTVAL (len_rtx));
3756 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3764 return expand_movstr (dst, src, target, /*endp=*/2);
3768 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3769 bytes from constant string DATA + OFFSET and return it as target
3773 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3774 enum machine_mode mode)
3776 const char *str = (const char *) data;
3778 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3781 return c_readstr (str + offset, mode);
3784 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3785 NULL_RTX if we failed the caller should emit a normal call. */
3788 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3790 tree fndecl = get_callee_fndecl (exp);
3792 if (validate_arglist (exp,
3793 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3795 tree dest = CALL_EXPR_ARG (exp, 0);
3796 tree src = CALL_EXPR_ARG (exp, 1);
3797 tree len = CALL_EXPR_ARG (exp, 2);
3798 tree slen = c_strlen (src, 1);
3799 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3803 while (TREE_CODE (result) == COMPOUND_EXPR)
3805 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3807 result = TREE_OPERAND (result, 1);
3809 return expand_expr (result, target, mode, EXPAND_NORMAL);
3812 /* We must be passed a constant len and src parameter. */
3813 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3816 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3818 /* We're required to pad with trailing zeros if the requested
3819 len is greater than strlen(s2)+1. In that case try to
3820 use store_by_pieces, if it fails, punt. */
3821 if (tree_int_cst_lt (slen, len))
3823 unsigned int dest_align
3824 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3825 const char *p = c_getstr (src);
3828 if (!p || dest_align == 0 || !host_integerp (len, 1)
3829 || !can_store_by_pieces (tree_low_cst (len, 1),
3830 builtin_strncpy_read_str,
3831 CONST_CAST (char *, p),
3835 dest_mem = get_memory_rtx (dest, len);
3836 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3837 builtin_strncpy_read_str,
3838 CONST_CAST (char *, p), dest_align, false, 0);
3839 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3840 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3847 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3848 bytes from constant string DATA + OFFSET and return it as target
3852 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3853 enum machine_mode mode)
3855 const char *c = (const char *) data;
3856 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3858 memset (p, *c, GET_MODE_SIZE (mode));
3860 return c_readstr (p, mode);
3863 /* Callback routine for store_by_pieces. Return the RTL of a register
3864 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3865 char value given in the RTL register data. For example, if mode is
3866 4 bytes wide, return the RTL for 0x01010101*data. */
3869 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3870 enum machine_mode mode)
3876 size = GET_MODE_SIZE (mode);
3880 p = XALLOCAVEC (char, size);
3881 memset (p, 1, size);
3882 coeff = c_readstr (p, mode);
3884 target = convert_to_mode (mode, (rtx) data, 1);
3885 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3886 return force_reg (mode, target);
3889 /* Expand expression EXP, which is a call to the memset builtin. Return
3890 NULL_RTX if we failed the caller should emit a normal call, otherwise
3891 try to get the result in TARGET, if convenient (and in mode MODE if that's
3895 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3897 if (!validate_arglist (exp,
3898 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3902 tree dest = CALL_EXPR_ARG (exp, 0);
3903 tree val = CALL_EXPR_ARG (exp, 1);
3904 tree len = CALL_EXPR_ARG (exp, 2);
3905 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3909 /* Helper function to do the actual work for expand_builtin_memset. The
3910 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3911 so that this can also be called without constructing an actual CALL_EXPR.
3912 The other arguments and return value are the same as for
3913 expand_builtin_memset. */
3916 expand_builtin_memset_args (tree dest, tree val, tree len,
3917 rtx target, enum machine_mode mode, tree orig_exp)
3920 enum built_in_function fcode;
3922 unsigned int dest_align;
3923 rtx dest_mem, dest_addr, len_rtx;
3924 HOST_WIDE_INT expected_size = -1;
3925 unsigned int expected_align = 0;
3926 tree_ann_common_t ann;
3928 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3930 /* If DEST is not a pointer type, don't do this operation in-line. */
3931 if (dest_align == 0)
3934 ann = tree_common_ann (orig_exp);
3936 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3938 if (expected_align < dest_align)
3939 expected_align = dest_align;
3941 /* If the LEN parameter is zero, return DEST. */
3942 if (integer_zerop (len))
3944 /* Evaluate and ignore VAL in case it has side-effects. */
3945 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3946 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3949 /* Stabilize the arguments in case we fail. */
3950 dest = builtin_save_expr (dest);
3951 val = builtin_save_expr (val);
3952 len = builtin_save_expr (len);
3954 len_rtx = expand_normal (len);
3955 dest_mem = get_memory_rtx (dest, len);
3957 if (TREE_CODE (val) != INTEGER_CST)
3961 val_rtx = expand_normal (val);
3962 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3965 /* Assume that we can memset by pieces if we can store
3966 * the coefficients by pieces (in the required modes).
3967 * We can't pass builtin_memset_gen_str as that emits RTL. */
3969 if (host_integerp (len, 1)
3970 && can_store_by_pieces (tree_low_cst (len, 1),
3971 builtin_memset_read_str, &c, dest_align,
3974 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3976 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3977 builtin_memset_gen_str, val_rtx, dest_align,
3980 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3981 dest_align, expected_align,
3985 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3986 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3990 if (target_char_cast (val, &c))
3995 if (host_integerp (len, 1)
3996 && can_store_by_pieces (tree_low_cst (len, 1),
3997 builtin_memset_read_str, &c, dest_align,
3999 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4000 builtin_memset_read_str, &c, dest_align, true, 0);
4001 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4002 dest_align, expected_align,
4006 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4007 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4011 set_mem_align (dest_mem, dest_align);
4012 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4013 CALL_EXPR_TAILCALL (orig_exp)
4014 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4015 expected_align, expected_size);
4019 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4020 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4026 fndecl = get_callee_fndecl (orig_exp);
4027 fcode = DECL_FUNCTION_CODE (fndecl);
4028 if (fcode == BUILT_IN_MEMSET)
4029 fn = build_call_expr (fndecl, 3, dest, val, len);
4030 else if (fcode == BUILT_IN_BZERO)
4031 fn = build_call_expr (fndecl, 2, dest, len);
4034 if (TREE_CODE (fn) == CALL_EXPR)
4035 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4036 return expand_call (fn, target, target == const0_rtx);
4039 /* Expand expression EXP, which is a call to the bzero builtin. Return
4040 NULL_RTX if we failed the caller should emit a normal call. */
4043 expand_builtin_bzero (tree exp)
4047 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4050 dest = CALL_EXPR_ARG (exp, 0);
4051 size = CALL_EXPR_ARG (exp, 1);
4053 /* New argument list transforming bzero(ptr x, int y) to
4054 memset(ptr x, int 0, size_t y). This is done this way
4055 so that if it isn't expanded inline, we fallback to
4056 calling bzero instead of memset. */
4058 return expand_builtin_memset_args (dest, integer_zero_node,
4059 fold_convert (sizetype, size),
4060 const0_rtx, VOIDmode, exp);
4063 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4064 caller should emit a normal call, otherwise try to get the result
4065 in TARGET, if convenient (and in mode MODE if that's convenient). */
4068 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4070 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4071 INTEGER_TYPE, VOID_TYPE))
4073 tree type = TREE_TYPE (exp);
4074 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4075 CALL_EXPR_ARG (exp, 1),
4076 CALL_EXPR_ARG (exp, 2), type);
4078 return expand_expr (result, target, mode, EXPAND_NORMAL);
4083 /* Expand expression EXP, which is a call to the memcmp built-in function.
4084 Return NULL_RTX if we failed and the
4085 caller should emit a normal call, otherwise try to get the result in
4086 TARGET, if convenient (and in mode MODE, if that's convenient). */
4089 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4091 if (!validate_arglist (exp,
4092 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4096 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4097 CALL_EXPR_ARG (exp, 1),
4098 CALL_EXPR_ARG (exp, 2));
4100 return expand_expr (result, target, mode, EXPAND_NORMAL);
4103 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4105 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4108 tree arg1 = CALL_EXPR_ARG (exp, 0);
4109 tree arg2 = CALL_EXPR_ARG (exp, 1);
4110 tree len = CALL_EXPR_ARG (exp, 2);
4113 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4115 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4116 enum machine_mode insn_mode;
4118 #ifdef HAVE_cmpmemsi
4120 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4123 #ifdef HAVE_cmpstrnsi
4125 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4130 /* If we don't have POINTER_TYPE, call the function. */
4131 if (arg1_align == 0 || arg2_align == 0)
4134 /* Make a place to write the result of the instruction. */
4137 && REG_P (result) && GET_MODE (result) == insn_mode
4138 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4139 result = gen_reg_rtx (insn_mode);
4141 arg1_rtx = get_memory_rtx (arg1, len);
4142 arg2_rtx = get_memory_rtx (arg2, len);
4143 arg3_rtx = expand_normal (len);
4145 /* Set MEM_SIZE as appropriate. */
4146 if (GET_CODE (arg3_rtx) == CONST_INT)
4148 set_mem_size (arg1_rtx, arg3_rtx);
4149 set_mem_size (arg2_rtx, arg3_rtx);
4152 #ifdef HAVE_cmpmemsi
4154 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4155 GEN_INT (MIN (arg1_align, arg2_align)));
4158 #ifdef HAVE_cmpstrnsi
4160 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4161 GEN_INT (MIN (arg1_align, arg2_align)));
4169 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4170 TYPE_MODE (integer_type_node), 3,
4171 XEXP (arg1_rtx, 0), Pmode,
4172 XEXP (arg2_rtx, 0), Pmode,
4173 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4174 TYPE_UNSIGNED (sizetype)),
4175 TYPE_MODE (sizetype));
4177 /* Return the value in the proper mode for this function. */
4178 mode = TYPE_MODE (TREE_TYPE (exp));
4179 if (GET_MODE (result) == mode)
4181 else if (target != 0)
4183 convert_move (target, result, 0);
4187 return convert_to_mode (mode, result, 0);
4194 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4195 if we failed the caller should emit a normal call, otherwise try to get
4196 the result in TARGET, if convenient. */
4199 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4201 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4205 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4206 CALL_EXPR_ARG (exp, 1));
4208 return expand_expr (result, target, mode, EXPAND_NORMAL);
4211 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4212 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4213 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4215 rtx arg1_rtx, arg2_rtx;
4216 rtx result, insn = NULL_RTX;
4218 tree arg1 = CALL_EXPR_ARG (exp, 0);
4219 tree arg2 = CALL_EXPR_ARG (exp, 1);
4222 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4224 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4226 /* If we don't have POINTER_TYPE, call the function. */
4227 if (arg1_align == 0 || arg2_align == 0)
4230 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4231 arg1 = builtin_save_expr (arg1);
4232 arg2 = builtin_save_expr (arg2);
4234 arg1_rtx = get_memory_rtx (arg1, NULL);
4235 arg2_rtx = get_memory_rtx (arg2, NULL);
4237 #ifdef HAVE_cmpstrsi
4238 /* Try to call cmpstrsi. */
4241 enum machine_mode insn_mode
4242 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4244 /* Make a place to write the result of the instruction. */
4247 && REG_P (result) && GET_MODE (result) == insn_mode
4248 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4249 result = gen_reg_rtx (insn_mode);
4251 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4252 GEN_INT (MIN (arg1_align, arg2_align)));
4255 #ifdef HAVE_cmpstrnsi
4256 /* Try to determine at least one length and call cmpstrnsi. */
4257 if (!insn && HAVE_cmpstrnsi)
4262 enum machine_mode insn_mode
4263 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4264 tree len1 = c_strlen (arg1, 1);
4265 tree len2 = c_strlen (arg2, 1);
4268 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4270 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4272 /* If we don't have a constant length for the first, use the length
4273 of the second, if we know it. We don't require a constant for
4274 this case; some cost analysis could be done if both are available
4275 but neither is constant. For now, assume they're equally cheap,
4276 unless one has side effects. If both strings have constant lengths,
4283 else if (TREE_SIDE_EFFECTS (len1))
4285 else if (TREE_SIDE_EFFECTS (len2))
4287 else if (TREE_CODE (len1) != INTEGER_CST)
4289 else if (TREE_CODE (len2) != INTEGER_CST)
4291 else if (tree_int_cst_lt (len1, len2))
4296 /* If both arguments have side effects, we cannot optimize. */
4297 if (!len || TREE_SIDE_EFFECTS (len))
4300 arg3_rtx = expand_normal (len);
4302 /* Make a place to write the result of the instruction. */
4305 && REG_P (result) && GET_MODE (result) == insn_mode
4306 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4307 result = gen_reg_rtx (insn_mode);
4309 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4310 GEN_INT (MIN (arg1_align, arg2_align)));
4318 /* Return the value in the proper mode for this function. */
4319 mode = TYPE_MODE (TREE_TYPE (exp));
4320 if (GET_MODE (result) == mode)
4323 return convert_to_mode (mode, result, 0);
4324 convert_move (target, result, 0);
4328 /* Expand the library call ourselves using a stabilized argument
4329 list to avoid re-evaluating the function's arguments twice. */
4330 #ifdef HAVE_cmpstrnsi
4333 fndecl = get_callee_fndecl (exp);
4334 fn = build_call_expr (fndecl, 2, arg1, arg2);
4335 if (TREE_CODE (fn) == CALL_EXPR)
4336 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4337 return expand_call (fn, target, target == const0_rtx);
4343 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4344 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4345 the result in TARGET, if convenient. */
4348 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4350 if (!validate_arglist (exp,
4351 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4355 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4356 CALL_EXPR_ARG (exp, 1),
4357 CALL_EXPR_ARG (exp, 2));
4359 return expand_expr (result, target, mode, EXPAND_NORMAL);
4362 /* If c_strlen can determine an expression for one of the string
4363 lengths, and it doesn't have side effects, then emit cmpstrnsi
4364 using length MIN(strlen(string)+1, arg3). */
4365 #ifdef HAVE_cmpstrnsi
4368 tree len, len1, len2;
4369 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4372 tree arg1 = CALL_EXPR_ARG (exp, 0);
4373 tree arg2 = CALL_EXPR_ARG (exp, 1);
4374 tree arg3 = CALL_EXPR_ARG (exp, 2);
4377 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4379 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4380 enum machine_mode insn_mode
4381 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4383 len1 = c_strlen (arg1, 1);
4384 len2 = c_strlen (arg2, 1);
4387 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4389 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4391 /* If we don't have a constant length for the first, use the length
4392 of the second, if we know it. We don't require a constant for
4393 this case; some cost analysis could be done if both are available
4394 but neither is constant. For now, assume they're equally cheap,
4395 unless one has side effects. If both strings have constant lengths,
4402 else if (TREE_SIDE_EFFECTS (len1))
4404 else if (TREE_SIDE_EFFECTS (len2))
4406 else if (TREE_CODE (len1) != INTEGER_CST)
4408 else if (TREE_CODE (len2) != INTEGER_CST)
4410 else if (tree_int_cst_lt (len1, len2))
4415 /* If both arguments have side effects, we cannot optimize. */
4416 if (!len || TREE_SIDE_EFFECTS (len))
4419 /* The actual new length parameter is MIN(len,arg3). */
4420 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4421 fold_convert (TREE_TYPE (len), arg3));
4423 /* If we don't have POINTER_TYPE, call the function. */
4424 if (arg1_align == 0 || arg2_align == 0)
4427 /* Make a place to write the result of the instruction. */
4430 && REG_P (result) && GET_MODE (result) == insn_mode
4431 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4432 result = gen_reg_rtx (insn_mode);
4434 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4435 arg1 = builtin_save_expr (arg1);
4436 arg2 = builtin_save_expr (arg2);
4437 len = builtin_save_expr (len);
4439 arg1_rtx = get_memory_rtx (arg1, len);
4440 arg2_rtx = get_memory_rtx (arg2, len);
4441 arg3_rtx = expand_normal (len);
4442 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4443 GEN_INT (MIN (arg1_align, arg2_align)));
4448 /* Return the value in the proper mode for this function. */
4449 mode = TYPE_MODE (TREE_TYPE (exp));
4450 if (GET_MODE (result) == mode)
4453 return convert_to_mode (mode, result, 0);
4454 convert_move (target, result, 0);
4458 /* Expand the library call ourselves using a stabilized argument
4459 list to avoid re-evaluating the function's arguments twice. */
4460 fndecl = get_callee_fndecl (exp);
4461 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4462 if (TREE_CODE (fn) == CALL_EXPR)
4463 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4464 return expand_call (fn, target, target == const0_rtx);
4470 /* Expand expression EXP, which is a call to the strcat builtin.
4471 Return NULL_RTX if we failed the caller should emit a normal call,
4472 otherwise try to get the result in TARGET, if convenient. */
4475 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4477 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4481 tree dst = CALL_EXPR_ARG (exp, 0);
4482 tree src = CALL_EXPR_ARG (exp, 1);
4483 const char *p = c_getstr (src);
4485 /* If the string length is zero, return the dst parameter. */
4486 if (p && *p == '\0')
4487 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4489 if (optimize_insn_for_speed_p ())
4491 /* See if we can store by pieces into (dst + strlen(dst)). */
4492 tree newsrc, newdst,
4493 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4496 /* Stabilize the argument list. */
4497 newsrc = builtin_save_expr (src);
4498 dst = builtin_save_expr (dst);
4502 /* Create strlen (dst). */
4503 newdst = build_call_expr (strlen_fn, 1, dst);
4504 /* Create (dst p+ strlen (dst)). */
4506 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4507 newdst = builtin_save_expr (newdst);
4509 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4511 end_sequence (); /* Stop sequence. */
4515 /* Output the entire sequence. */
4516 insns = get_insns ();
4520 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4527 /* Expand expression EXP, which is a call to the strncat builtin.
4528 Return NULL_RTX if we failed the caller should emit a normal call,
4529 otherwise try to get the result in TARGET, if convenient. */
4532 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4534 if (validate_arglist (exp,
4535 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4537 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4538 CALL_EXPR_ARG (exp, 1),
4539 CALL_EXPR_ARG (exp, 2));
4541 return expand_expr (result, target, mode, EXPAND_NORMAL);
4546 /* Expand expression EXP, which is a call to the strspn builtin.
4547 Return NULL_RTX if we failed the caller should emit a normal call,
4548 otherwise try to get the result in TARGET, if convenient. */
4551 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4553 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4555 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4556 CALL_EXPR_ARG (exp, 1));
4558 return expand_expr (result, target, mode, EXPAND_NORMAL);
4563 /* Expand expression EXP, which is a call to the strcspn builtin.
4564 Return NULL_RTX if we failed the caller should emit a normal call,
4565 otherwise try to get the result in TARGET, if convenient. */
4568 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4570 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4572 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4573 CALL_EXPR_ARG (exp, 1));
4575 return expand_expr (result, target, mode, EXPAND_NORMAL);
4580 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4581 if that's convenient. */
4584 expand_builtin_saveregs (void)
4588 /* Don't do __builtin_saveregs more than once in a function.
4589 Save the result of the first call and reuse it. */
4590 if (saveregs_value != 0)
4591 return saveregs_value;
4593 /* When this function is called, it means that registers must be
4594 saved on entry to this function. So we migrate the call to the
4595 first insn of this function. */
4599 /* Do whatever the machine needs done in this case. */
4600 val = targetm.calls.expand_builtin_saveregs ();
4605 saveregs_value = val;
4607 /* Put the insns after the NOTE that starts the function. If this
4608 is inside a start_sequence, make the outer-level insn chain current, so
4609 the code is placed at the start of the function. */
4610 push_topmost_sequence ();
4611 emit_insn_after (seq, entry_of_function ());
4612 pop_topmost_sequence ();
4617 /* __builtin_args_info (N) returns word N of the arg space info
4618 for the current function. The number and meanings of words
4619 is controlled by the definition of CUMULATIVE_ARGS. */
4622 expand_builtin_args_info (tree exp)
4624 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4625 int *word_ptr = (int *) &crtl->args.info;
4627 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4629 if (call_expr_nargs (exp) != 0)
4631 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4632 error ("argument of %<__builtin_args_info%> must be constant");
4635 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4637 if (wordnum < 0 || wordnum >= nwords)
4638 error ("argument of %<__builtin_args_info%> out of range");
4640 return GEN_INT (word_ptr[wordnum]);
4644 error ("missing argument in %<__builtin_args_info%>");
4649 /* Expand a call to __builtin_next_arg. */
4652 expand_builtin_next_arg (void)
4654 /* Checking arguments is already done in fold_builtin_next_arg
4655 that must be called before this function. */
4656 return expand_binop (ptr_mode, add_optab,
4657 crtl->args.internal_arg_pointer,
4658 crtl->args.arg_offset_rtx,
4659 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4662 /* Make it easier for the backends by protecting the valist argument
4663 from multiple evaluations. */
4666 stabilize_va_list (tree valist, int needs_lvalue)
4668 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4670 gcc_assert (vatype != NULL_TREE);
4672 if (TREE_CODE (vatype) == ARRAY_TYPE)
4674 if (TREE_SIDE_EFFECTS (valist))
4675 valist = save_expr (valist);
4677 /* For this case, the backends will be expecting a pointer to
4678 vatype, but it's possible we've actually been given an array
4679 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4681 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4683 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4684 valist = build_fold_addr_expr_with_type (valist, p1);
4693 if (! TREE_SIDE_EFFECTS (valist))
4696 pt = build_pointer_type (vatype);
4697 valist = fold_build1 (ADDR_EXPR, pt, valist);
4698 TREE_SIDE_EFFECTS (valist) = 1;
4701 if (TREE_SIDE_EFFECTS (valist))
4702 valist = save_expr (valist);
4703 valist = build_fold_indirect_ref (valist);
4709 /* The "standard" definition of va_list is void*. */
4712 std_build_builtin_va_list (void)
4714 return ptr_type_node;
4717 /* The "standard" abi va_list is va_list_type_node. */
4720 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4722 return va_list_type_node;
4725 /* The "standard" type of va_list is va_list_type_node. */
4728 std_canonical_va_list_type (tree type)
4732 if (INDIRECT_REF_P (type))
4733 type = TREE_TYPE (type);
4734 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4735 type = TREE_TYPE (type);
4736 wtype = va_list_type_node;
4738 /* Treat structure va_list types. */
4739 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4740 htype = TREE_TYPE (htype);
4741 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4743 /* If va_list is an array type, the argument may have decayed
4744 to a pointer type, e.g. by being passed to another function.
4745 In that case, unwrap both types so that we can compare the
4746 underlying records. */
4747 if (TREE_CODE (htype) == ARRAY_TYPE
4748 || POINTER_TYPE_P (htype))
4750 wtype = TREE_TYPE (wtype);
4751 htype = TREE_TYPE (htype);
4754 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4755 return va_list_type_node;
4760 /* The "standard" implementation of va_start: just assign `nextarg' to
4764 std_expand_builtin_va_start (tree valist, rtx nextarg)
4766 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4767 convert_move (va_r, nextarg, 0);
4770 /* Expand EXP, a call to __builtin_va_start. */
4773 expand_builtin_va_start (tree exp)
4778 if (call_expr_nargs (exp) < 2)
4780 error ("too few arguments to function %<va_start%>");
4784 if (fold_builtin_next_arg (exp, true))
4787 nextarg = expand_builtin_next_arg ();
4788 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4790 if (targetm.expand_builtin_va_start)
4791 targetm.expand_builtin_va_start (valist, nextarg);
4793 std_expand_builtin_va_start (valist, nextarg);
4798 /* The "standard" implementation of va_arg: read the value from the
4799 current (padded) address and increment by the (padded) size. */
4802 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4805 tree addr, t, type_size, rounded_size, valist_tmp;
4806 unsigned HOST_WIDE_INT align, boundary;
4809 #ifdef ARGS_GROW_DOWNWARD
4810 /* All of the alignment and movement below is for args-grow-up machines.
4811 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4812 implement their own specialized gimplify_va_arg_expr routines. */
4816 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4818 type = build_pointer_type (type);
4820 align = PARM_BOUNDARY / BITS_PER_UNIT;
4821 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4823 /* When we align parameter on stack for caller, if the parameter
4824 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4825 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4826 here with caller. */
4827 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4828 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4830 boundary /= BITS_PER_UNIT;
4832 /* Hoist the valist value into a temporary for the moment. */
4833 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4835 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4836 requires greater alignment, we must perform dynamic alignment. */
4837 if (boundary > align
4838 && !integer_zerop (TYPE_SIZE (type)))
4840 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4841 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4842 valist_tmp, size_int (boundary - 1)));
4843 gimplify_and_add (t, pre_p);
4845 t = fold_convert (sizetype, valist_tmp);
4846 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4847 fold_convert (TREE_TYPE (valist),
4848 fold_build2 (BIT_AND_EXPR, sizetype, t,
4849 size_int (-boundary))));
4850 gimplify_and_add (t, pre_p);
4855 /* If the actual alignment is less than the alignment of the type,
4856 adjust the type accordingly so that we don't assume strict alignment
4857 when dereferencing the pointer. */
4858 boundary *= BITS_PER_UNIT;
4859 if (boundary < TYPE_ALIGN (type))
4861 type = build_variant_type_copy (type);
4862 TYPE_ALIGN (type) = boundary;
4865 /* Compute the rounded size of the type. */
4866 type_size = size_in_bytes (type);
4867 rounded_size = round_up (type_size, align);
4869 /* Reduce rounded_size so it's sharable with the postqueue. */
4870 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4874 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4876 /* Small args are padded downward. */
4877 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4878 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4879 size_binop (MINUS_EXPR, rounded_size, type_size));
4880 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4883 /* Compute new value for AP. */
4884 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4885 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4886 gimplify_and_add (t, pre_p);
4888 addr = fold_convert (build_pointer_type (type), addr);
4891 addr = build_va_arg_indirect_ref (addr);
4893 return build_va_arg_indirect_ref (addr);
4896 /* Build an indirect-ref expression over the given TREE, which represents a
4897 piece of a va_arg() expansion. */
4899 build_va_arg_indirect_ref (tree addr)
4901 addr = build_fold_indirect_ref (addr);
4903 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4909 /* Return a dummy expression of type TYPE in order to keep going after an
4913 dummy_object (tree type)
4915 tree t = build_int_cst (build_pointer_type (type), 0);
4916 return build1 (INDIRECT_REF, type, t);
4919 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4920 builtin function, but a very special sort of operator. */
4922 enum gimplify_status
4923 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4925 tree promoted_type, have_va_type;
4926 tree valist = TREE_OPERAND (*expr_p, 0);
4927 tree type = TREE_TYPE (*expr_p);
4930 /* Verify that valist is of the proper type. */
4931 have_va_type = TREE_TYPE (valist);
4932 if (have_va_type == error_mark_node)
4934 have_va_type = targetm.canonical_va_list_type (have_va_type);
4936 if (have_va_type == NULL_TREE)
4938 error ("first argument to %<va_arg%> not of type %<va_list%>");
4942 /* Generate a diagnostic for requesting data of a type that cannot
4943 be passed through `...' due to type promotion at the call site. */
4944 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4947 static bool gave_help;
4950 /* Unfortunately, this is merely undefined, rather than a constraint
4951 violation, so we cannot make this an error. If this call is never
4952 executed, the program is still strictly conforming. */
4953 warned = warning (0, "%qT is promoted to %qT when passed through %<...%>",
4954 type, promoted_type);
4955 if (!gave_help && warned)
4958 inform (input_location, "(so you should pass %qT not %qT to %<va_arg%>)",
4959 promoted_type, type);
4962 /* We can, however, treat "undefined" any way we please.
4963 Call abort to encourage the user to fix the program. */
4965 inform (input_location, "if this code is reached, the program will abort");
4966 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4967 gimplify_and_add (t, pre_p);
4969 /* This is dead code, but go ahead and finish so that the
4970 mode of the result comes out right. */
4971 *expr_p = dummy_object (type);
4976 /* Make it easier for the backends by protecting the valist argument
4977 from multiple evaluations. */
4978 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4980 /* For this case, the backends will be expecting a pointer to
4981 TREE_TYPE (abi), but it's possible we've
4982 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4984 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4986 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4987 valist = build_fold_addr_expr_with_type (valist, p1);
4990 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4993 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4995 if (!targetm.gimplify_va_arg_expr)
4996 /* FIXME: Once most targets are converted we should merely
4997 assert this is non-null. */
5000 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
5005 /* Expand EXP, a call to __builtin_va_end. */
5008 expand_builtin_va_end (tree exp)
5010 tree valist = CALL_EXPR_ARG (exp, 0);
5012 /* Evaluate for side effects, if needed. I hate macros that don't
5014 if (TREE_SIDE_EFFECTS (valist))
5015 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5020 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5021 builtin rather than just as an assignment in stdarg.h because of the
5022 nastiness of array-type va_list types. */
5025 expand_builtin_va_copy (tree exp)
5029 dst = CALL_EXPR_ARG (exp, 0);
5030 src = CALL_EXPR_ARG (exp, 1);
5032 dst = stabilize_va_list (dst, 1);
5033 src = stabilize_va_list (src, 0);
5035 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5037 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5039 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5040 TREE_SIDE_EFFECTS (t) = 1;
5041 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5045 rtx dstb, srcb, size;
5047 /* Evaluate to pointers. */
5048 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5049 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5050 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5051 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5053 dstb = convert_memory_address (Pmode, dstb);
5054 srcb = convert_memory_address (Pmode, srcb);
5056 /* "Dereference" to BLKmode memories. */
5057 dstb = gen_rtx_MEM (BLKmode, dstb);
5058 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5059 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5060 srcb = gen_rtx_MEM (BLKmode, srcb);
5061 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5062 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5065 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5071 /* Expand a call to one of the builtin functions __builtin_frame_address or
5072 __builtin_return_address. */
5075 expand_builtin_frame_address (tree fndecl, tree exp)
5077 /* The argument must be a nonnegative integer constant.
5078 It counts the number of frames to scan up the stack.
5079 The value is the return address saved in that frame. */
5080 if (call_expr_nargs (exp) == 0)
5081 /* Warning about missing arg was already issued. */
5083 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5085 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5086 error ("invalid argument to %<__builtin_frame_address%>");
5088 error ("invalid argument to %<__builtin_return_address%>");
5094 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5095 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5097 /* Some ports cannot access arbitrary stack frames. */
5100 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5101 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5103 warning (0, "unsupported argument to %<__builtin_return_address%>");
5107 /* For __builtin_frame_address, return what we've got. */
5108 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5112 && ! CONSTANT_P (tem))
5113 tem = copy_to_mode_reg (Pmode, tem);
5118 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5119 we failed and the caller should emit a normal call, otherwise try to get
5120 the result in TARGET, if convenient. */
5123 expand_builtin_alloca (tree exp, rtx target)
5128 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5129 should always expand to function calls. These can be intercepted
5134 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5137 /* Compute the argument. */
5138 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5140 /* Allocate the desired space. */
5141 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5142 result = convert_memory_address (ptr_mode, result);
5147 /* Expand a call to a bswap builtin with argument ARG0. MODE
5148 is the mode to expand with. */
5151 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5153 enum machine_mode mode;
5157 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5160 arg = CALL_EXPR_ARG (exp, 0);
5161 mode = TYPE_MODE (TREE_TYPE (arg));
5162 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5164 target = expand_unop (mode, bswap_optab, op0, target, 1);
5166 gcc_assert (target);
5168 return convert_to_mode (mode, target, 0);
5171 /* Expand a call to a unary builtin in EXP.
5172 Return NULL_RTX if a normal call should be emitted rather than expanding the
5173 function in-line. If convenient, the result should be placed in TARGET.
5174 SUBTARGET may be used as the target for computing one of EXP's operands. */
5177 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5178 rtx subtarget, optab op_optab)
5182 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5185 /* Compute the argument. */
5186 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5187 VOIDmode, EXPAND_NORMAL);
5188 /* Compute op, into TARGET if possible.
5189 Set TARGET to wherever the result comes back. */
5190 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5191 op_optab, op0, target, 1);
5192 gcc_assert (target);
5194 return convert_to_mode (target_mode, target, 0);
5197 /* If the string passed to fputs is a constant and is one character
5198 long, we attempt to transform this call into __builtin_fputc(). */
5201 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5203 /* Verify the arguments in the original call. */
5204 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5206 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5207 CALL_EXPR_ARG (exp, 1),
5208 (target == const0_rtx),
5209 unlocked, NULL_TREE);
5211 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5216 /* Expand a call to __builtin_expect. We just return our argument
5217 as the builtin_expect semantic should've been already executed by
5218 tree branch prediction pass. */
5221 expand_builtin_expect (tree exp, rtx target)
5225 if (call_expr_nargs (exp) < 2)
5227 arg = CALL_EXPR_ARG (exp, 0);
5228 c = CALL_EXPR_ARG (exp, 1);
5230 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5231 /* When guessing was done, the hints should be already stripped away. */
5232 gcc_assert (!flag_guess_branch_prob
5233 || optimize == 0 || errorcount || sorrycount);
5238 expand_builtin_trap (void)
5242 emit_insn (gen_trap ());
5245 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5249 /* Expand EXP, a call to fabs, fabsf or fabsl.
5250 Return NULL_RTX if a normal call should be emitted rather than expanding
5251 the function inline. If convenient, the result should be placed
5252 in TARGET. SUBTARGET may be used as the target for computing
5256 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5258 enum machine_mode mode;
5262 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5265 arg = CALL_EXPR_ARG (exp, 0);
5266 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5267 mode = TYPE_MODE (TREE_TYPE (arg));
5268 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5269 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5272 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5273 Return NULL is a normal call should be emitted rather than expanding the
5274 function inline. If convenient, the result should be placed in TARGET.
5275 SUBTARGET may be used as the target for computing the operand. */
5278 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5283 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5286 arg = CALL_EXPR_ARG (exp, 0);
5287 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5289 arg = CALL_EXPR_ARG (exp, 1);
5290 op1 = expand_normal (arg);
5292 return expand_copysign (op0, op1, target);
5295 /* Create a new constant string literal and return a char* pointer to it.
5296 The STRING_CST value is the LEN characters at STR. */
5298 build_string_literal (int len, const char *str)
5300 tree t, elem, index, type;
5302 t = build_string (len, str);
5303 elem = build_type_variant (char_type_node, 1, 0);
5304 index = build_index_type (size_int (len - 1));
5305 type = build_array_type (elem, index);
5306 TREE_TYPE (t) = type;
5307 TREE_CONSTANT (t) = 1;
5308 TREE_READONLY (t) = 1;
5309 TREE_STATIC (t) = 1;
5311 type = build_pointer_type (elem);
5312 t = build1 (ADDR_EXPR, type,
5313 build4 (ARRAY_REF, elem,
5314 t, integer_zero_node, NULL_TREE, NULL_TREE));
5318 /* Expand EXP, a call to printf or printf_unlocked.
5319 Return NULL_RTX if a normal call should be emitted rather than transforming
5320 the function inline. If convenient, the result should be placed in
5321 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5324 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5327 /* If we're using an unlocked function, assume the other unlocked
5328 functions exist explicitly. */
5329 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5330 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5331 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5332 : implicit_built_in_decls[BUILT_IN_PUTS];
5333 const char *fmt_str;
5336 int nargs = call_expr_nargs (exp);
5338 /* If the return value is used, don't do the transformation. */
5339 if (target != const0_rtx)
5342 /* Verify the required arguments in the original call. */
5345 fmt = CALL_EXPR_ARG (exp, 0);
5346 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5349 /* Check whether the format is a literal string constant. */
5350 fmt_str = c_getstr (fmt);
5351 if (fmt_str == NULL)
5354 if (!init_target_chars ())
5357 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5358 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5361 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5364 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5366 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5367 else if (strcmp (fmt_str, target_percent_c) == 0)
5370 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5373 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5377 /* We can't handle anything else with % args or %% ... yet. */
5378 if (strchr (fmt_str, target_percent))
5384 /* If the format specifier was "", printf does nothing. */
5385 if (fmt_str[0] == '\0')
5387 /* If the format specifier has length of 1, call putchar. */
5388 if (fmt_str[1] == '\0')
5390 /* Given printf("c"), (where c is any one character,)
5391 convert "c"[0] to an int and pass that to the replacement
5393 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5395 fn = build_call_expr (fn_putchar, 1, arg);
5399 /* If the format specifier was "string\n", call puts("string"). */
5400 size_t len = strlen (fmt_str);
5401 if ((unsigned char)fmt_str[len - 1] == target_newline)
5403 /* Create a NUL-terminated string that's one char shorter
5404 than the original, stripping off the trailing '\n'. */
5405 char *newstr = XALLOCAVEC (char, len);
5406 memcpy (newstr, fmt_str, len - 1);
5407 newstr[len - 1] = 0;
5408 arg = build_string_literal (len, newstr);
5410 fn = build_call_expr (fn_puts, 1, arg);
5413 /* We'd like to arrange to call fputs(string,stdout) here,
5414 but we need stdout and don't have a way to get it yet. */
5421 if (TREE_CODE (fn) == CALL_EXPR)
5422 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5423 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5426 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5427 Return NULL_RTX if a normal call should be emitted rather than transforming
5428 the function inline. If convenient, the result should be placed in
5429 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5432 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5435 /* If we're using an unlocked function, assume the other unlocked
5436 functions exist explicitly. */
5437 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5438 : implicit_built_in_decls[BUILT_IN_FPUTC];
5439 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5440 : implicit_built_in_decls[BUILT_IN_FPUTS];
5441 const char *fmt_str;
5444 int nargs = call_expr_nargs (exp);
5446 /* If the return value is used, don't do the transformation. */
5447 if (target != const0_rtx)
5450 /* Verify the required arguments in the original call. */
5453 fp = CALL_EXPR_ARG (exp, 0);
5454 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5456 fmt = CALL_EXPR_ARG (exp, 1);
5457 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5460 /* Check whether the format is a literal string constant. */
5461 fmt_str = c_getstr (fmt);
5462 if (fmt_str == NULL)
5465 if (!init_target_chars ())
5468 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5469 if (strcmp (fmt_str, target_percent_s) == 0)
5472 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5474 arg = CALL_EXPR_ARG (exp, 2);
5476 fn = build_call_expr (fn_fputs, 2, arg, fp);
5478 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5479 else if (strcmp (fmt_str, target_percent_c) == 0)
5482 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5484 arg = CALL_EXPR_ARG (exp, 2);
5486 fn = build_call_expr (fn_fputc, 2, arg, fp);
5490 /* We can't handle anything else with % args or %% ... yet. */
5491 if (strchr (fmt_str, target_percent))
5497 /* If the format specifier was "", fprintf does nothing. */
5498 if (fmt_str[0] == '\0')
5500 /* Evaluate and ignore FILE* argument for side-effects. */
5501 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5505 /* When "string" doesn't contain %, replace all cases of
5506 fprintf(stream,string) with fputs(string,stream). The fputs
5507 builtin will take care of special cases like length == 1. */
5509 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5514 if (TREE_CODE (fn) == CALL_EXPR)
5515 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5516 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5519 /* Expand a call EXP to sprintf. Return NULL_RTX if
5520 a normal call should be emitted rather than expanding the function
5521 inline. If convenient, the result should be placed in TARGET with
5525 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5528 const char *fmt_str;
5529 int nargs = call_expr_nargs (exp);
5531 /* Verify the required arguments in the original call. */
5534 dest = CALL_EXPR_ARG (exp, 0);
5535 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5537 fmt = CALL_EXPR_ARG (exp, 0);
5538 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5541 /* Check whether the format is a literal string constant. */
5542 fmt_str = c_getstr (fmt);
5543 if (fmt_str == NULL)
5546 if (!init_target_chars ())
5549 /* If the format doesn't contain % args or %%, use strcpy. */
5550 if (strchr (fmt_str, target_percent) == 0)
5552 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5555 if ((nargs > 2) || ! fn)
5557 expand_expr (build_call_expr (fn, 2, dest, fmt),
5558 const0_rtx, VOIDmode, EXPAND_NORMAL);
5559 if (target == const0_rtx)
5561 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5562 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5564 /* If the format is "%s", use strcpy if the result isn't used. */
5565 else if (strcmp (fmt_str, target_percent_s) == 0)
5568 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5574 arg = CALL_EXPR_ARG (exp, 2);
5575 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5578 if (target != const0_rtx)
5580 len = c_strlen (arg, 1);
5581 if (! len || TREE_CODE (len) != INTEGER_CST)
5587 expand_expr (build_call_expr (fn, 2, dest, arg),
5588 const0_rtx, VOIDmode, EXPAND_NORMAL);
5590 if (target == const0_rtx)
5592 return expand_expr (len, target, mode, EXPAND_NORMAL);
5598 /* Expand a call to either the entry or exit function profiler. */
5601 expand_builtin_profile_func (bool exitp)
5603 rtx this_rtx, which;
5605 this_rtx = DECL_RTL (current_function_decl);
5606 gcc_assert (MEM_P (this_rtx));
5607 this_rtx = XEXP (this_rtx, 0);
5610 which = profile_function_exit_libfunc;
5612 which = profile_function_entry_libfunc;
5614 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5615 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5622 /* Expand a call to __builtin___clear_cache. */
5625 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5627 #ifndef HAVE_clear_cache
5628 #ifdef CLEAR_INSN_CACHE
5629 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5630 does something. Just do the default expansion to a call to
5634 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5635 does nothing. There is no need to call it. Do nothing. */
5637 #endif /* CLEAR_INSN_CACHE */
5639 /* We have a "clear_cache" insn, and it will handle everything. */
5641 rtx begin_rtx, end_rtx;
5642 enum insn_code icode;
5644 /* We must not expand to a library call. If we did, any
5645 fallback library function in libgcc that might contain a call to
5646 __builtin___clear_cache() would recurse infinitely. */
5647 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5649 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5653 if (HAVE_clear_cache)
5655 icode = CODE_FOR_clear_cache;
5657 begin = CALL_EXPR_ARG (exp, 0);
5658 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5659 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5660 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5661 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5663 end = CALL_EXPR_ARG (exp, 1);
5664 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5665 end_rtx = convert_memory_address (Pmode, end_rtx);
5666 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5667 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5669 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5672 #endif /* HAVE_clear_cache */
5675 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5678 round_trampoline_addr (rtx tramp)
5680 rtx temp, addend, mask;
5682 /* If we don't need too much alignment, we'll have been guaranteed
5683 proper alignment by get_trampoline_type. */
5684 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5687 /* Round address up to desired boundary. */
5688 temp = gen_reg_rtx (Pmode);
5689 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5690 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5692 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5693 temp, 0, OPTAB_LIB_WIDEN);
5694 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5695 temp, 0, OPTAB_LIB_WIDEN);
5701 expand_builtin_init_trampoline (tree exp)
5703 tree t_tramp, t_func, t_chain;
5704 rtx r_tramp, r_func, r_chain;
5705 #ifdef TRAMPOLINE_TEMPLATE
5709 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5710 POINTER_TYPE, VOID_TYPE))
5713 t_tramp = CALL_EXPR_ARG (exp, 0);
5714 t_func = CALL_EXPR_ARG (exp, 1);
5715 t_chain = CALL_EXPR_ARG (exp, 2);
5717 r_tramp = expand_normal (t_tramp);
5718 r_func = expand_normal (t_func);
5719 r_chain = expand_normal (t_chain);
5721 /* Generate insns to initialize the trampoline. */
5722 r_tramp = round_trampoline_addr (r_tramp);
5723 #ifdef TRAMPOLINE_TEMPLATE
5724 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5725 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5726 emit_block_move (blktramp, assemble_trampoline_template (),
5727 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5729 trampolines_created = 1;
5730 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5736 expand_builtin_adjust_trampoline (tree exp)
5740 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5743 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5744 tramp = round_trampoline_addr (tramp);
5745 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5746 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5752 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5753 function. The function first checks whether the back end provides
5754 an insn to implement signbit for the respective mode. If not, it
5755 checks whether the floating point format of the value is such that
5756 the sign bit can be extracted. If that is not the case, the
5757 function returns NULL_RTX to indicate that a normal call should be
5758 emitted rather than expanding the function in-line. EXP is the
5759 expression that is a call to the builtin function; if convenient,
5760 the result should be placed in TARGET. */
5762 expand_builtin_signbit (tree exp, rtx target)
5764 const struct real_format *fmt;
5765 enum machine_mode fmode, imode, rmode;
5766 HOST_WIDE_INT hi, lo;
5769 enum insn_code icode;
5772 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5775 arg = CALL_EXPR_ARG (exp, 0);
5776 fmode = TYPE_MODE (TREE_TYPE (arg));
5777 rmode = TYPE_MODE (TREE_TYPE (exp));
5778 fmt = REAL_MODE_FORMAT (fmode);
5780 arg = builtin_save_expr (arg);
5782 /* Expand the argument yielding a RTX expression. */
5783 temp = expand_normal (arg);
5785 /* Check if the back end provides an insn that handles signbit for the
5787 icode = signbit_optab->handlers [(int) fmode].insn_code;
5788 if (icode != CODE_FOR_nothing)
5790 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5791 emit_unop_insn (icode, target, temp, UNKNOWN);
5795 /* For floating point formats without a sign bit, implement signbit
5797 bitpos = fmt->signbit_ro;
5800 /* But we can't do this if the format supports signed zero. */
5801 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5804 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5805 build_real (TREE_TYPE (arg), dconst0));
5806 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5809 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5811 imode = int_mode_for_mode (fmode);
5812 if (imode == BLKmode)
5814 temp = gen_lowpart (imode, temp);
5819 /* Handle targets with different FP word orders. */
5820 if (FLOAT_WORDS_BIG_ENDIAN)
5821 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5823 word = bitpos / BITS_PER_WORD;
5824 temp = operand_subword_force (temp, word, fmode);
5825 bitpos = bitpos % BITS_PER_WORD;
5828 /* Force the intermediate word_mode (or narrower) result into a
5829 register. This avoids attempting to create paradoxical SUBREGs
5830 of floating point modes below. */
5831 temp = force_reg (imode, temp);
5833 /* If the bitpos is within the "result mode" lowpart, the operation
5834 can be implement with a single bitwise AND. Otherwise, we need
5835 a right shift and an AND. */
5837 if (bitpos < GET_MODE_BITSIZE (rmode))
5839 if (bitpos < HOST_BITS_PER_WIDE_INT)
5842 lo = (HOST_WIDE_INT) 1 << bitpos;
5846 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5850 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5851 temp = gen_lowpart (rmode, temp);
5852 temp = expand_binop (rmode, and_optab, temp,
5853 immed_double_const (lo, hi, rmode),
5854 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5858 /* Perform a logical right shift to place the signbit in the least
5859 significant bit, then truncate the result to the desired mode
5860 and mask just this bit. */
5861 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5862 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5863 temp = gen_lowpart (rmode, temp);
5864 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5865 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5871 /* Expand fork or exec calls. TARGET is the desired target of the
5872 call. EXP is the call. FN is the
5873 identificator of the actual function. IGNORE is nonzero if the
5874 value is to be ignored. */
5877 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5882 /* If we are not profiling, just call the function. */
5883 if (!profile_arc_flag)
5886 /* Otherwise call the wrapper. This should be equivalent for the rest of
5887 compiler, so the code does not diverge, and the wrapper may run the
5888 code necessary for keeping the profiling sane. */
5890 switch (DECL_FUNCTION_CODE (fn))
5893 id = get_identifier ("__gcov_fork");
5896 case BUILT_IN_EXECL:
5897 id = get_identifier ("__gcov_execl");
5900 case BUILT_IN_EXECV:
5901 id = get_identifier ("__gcov_execv");
5904 case BUILT_IN_EXECLP:
5905 id = get_identifier ("__gcov_execlp");
5908 case BUILT_IN_EXECLE:
5909 id = get_identifier ("__gcov_execle");
5912 case BUILT_IN_EXECVP:
5913 id = get_identifier ("__gcov_execvp");
5916 case BUILT_IN_EXECVE:
5917 id = get_identifier ("__gcov_execve");
5924 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5925 DECL_EXTERNAL (decl) = 1;
5926 TREE_PUBLIC (decl) = 1;
5927 DECL_ARTIFICIAL (decl) = 1;
5928 TREE_NOTHROW (decl) = 1;
5929 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5930 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5931 call = rewrite_call_expr (exp, 0, decl, 0);
5932 return expand_call (call, target, ignore);
5937 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5938 the pointer in these functions is void*, the tree optimizers may remove
5939 casts. The mode computed in expand_builtin isn't reliable either, due
5940 to __sync_bool_compare_and_swap.
5942 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5943 group of builtins. This gives us log2 of the mode size. */
5945 static inline enum machine_mode
5946 get_builtin_sync_mode (int fcode_diff)
5948 /* The size is not negotiable, so ask not to get BLKmode in return
5949 if the target indicates that a smaller size would be better. */
5950 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5953 /* Expand the memory expression LOC and return the appropriate memory operand
5954 for the builtin_sync operations. */
5957 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5961 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5963 /* Note that we explicitly do not want any alias information for this
5964 memory, so that we kill all other live memories. Otherwise we don't
5965 satisfy the full barrier semantics of the intrinsic. */
5966 mem = validize_mem (gen_rtx_MEM (mode, addr));
5968 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5969 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5970 MEM_VOLATILE_P (mem) = 1;
5975 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5976 EXP is the CALL_EXPR. CODE is the rtx code
5977 that corresponds to the arithmetic or logical operation from the name;
5978 an exception here is that NOT actually means NAND. TARGET is an optional
5979 place for us to store the results; AFTER is true if this is the
5980 fetch_and_xxx form. IGNORE is true if we don't actually care about
5981 the result of the operation at all. */
5984 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5985 enum rtx_code code, bool after,
5986 rtx target, bool ignore)
5989 enum machine_mode old_mode;
5991 /* Expand the operands. */
5992 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5994 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5995 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5996 of CONST_INTs, where we know the old_mode only from the call argument. */
5997 old_mode = GET_MODE (val);
5998 if (old_mode == VOIDmode)
5999 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6000 val = convert_modes (mode, old_mode, val, 1);
6003 return expand_sync_operation (mem, val, code);
6005 return expand_sync_fetch_operation (mem, val, code, after, target);
6008 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6009 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6010 true if this is the boolean form. TARGET is a place for us to store the
6011 results; this is NOT optional if IS_BOOL is true. */
6014 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6015 bool is_bool, rtx target)
6017 rtx old_val, new_val, mem;
6018 enum machine_mode old_mode;
6020 /* Expand the operands. */
6021 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6024 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6025 mode, EXPAND_NORMAL);
6026 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6027 of CONST_INTs, where we know the old_mode only from the call argument. */
6028 old_mode = GET_MODE (old_val);
6029 if (old_mode == VOIDmode)
6030 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6031 old_val = convert_modes (mode, old_mode, old_val, 1);
6033 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6034 mode, EXPAND_NORMAL);
6035 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6036 of CONST_INTs, where we know the old_mode only from the call argument. */
6037 old_mode = GET_MODE (new_val);
6038 if (old_mode == VOIDmode)
6039 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6040 new_val = convert_modes (mode, old_mode, new_val, 1);
6043 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6045 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6048 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6049 general form is actually an atomic exchange, and some targets only
6050 support a reduced form with the second argument being a constant 1.
6051 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6055 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6059 enum machine_mode old_mode;
6061 /* Expand the operands. */
6062 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6063 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6064 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6065 of CONST_INTs, where we know the old_mode only from the call argument. */
6066 old_mode = GET_MODE (val);
6067 if (old_mode == VOIDmode)
6068 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6069 val = convert_modes (mode, old_mode, val, 1);
6071 return expand_sync_lock_test_and_set (mem, val, target);
6074 /* Expand the __sync_synchronize intrinsic. */
6077 expand_builtin_synchronize (void)
6081 #ifdef HAVE_memory_barrier
6082 if (HAVE_memory_barrier)
6084 emit_insn (gen_memory_barrier ());
6089 if (synchronize_libfunc != NULL_RTX)
6091 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6095 /* If no explicit memory barrier instruction is available, create an
6096 empty asm stmt with a memory clobber. */
6097 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6098 tree_cons (NULL, build_string (6, "memory"), NULL));
6099 ASM_VOLATILE_P (x) = 1;
6100 expand_asm_expr (x);
6103 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6106 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6108 enum insn_code icode;
6110 rtx val = const0_rtx;
6112 /* Expand the operands. */
6113 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6115 /* If there is an explicit operation in the md file, use it. */
6116 icode = sync_lock_release[mode];
6117 if (icode != CODE_FOR_nothing)
6119 if (!insn_data[icode].operand[1].predicate (val, mode))
6120 val = force_reg (mode, val);
6122 insn = GEN_FCN (icode) (mem, val);
6130 /* Otherwise we can implement this operation by emitting a barrier
6131 followed by a store of zero. */
6132 expand_builtin_synchronize ();
6133 emit_move_insn (mem, val);
6136 /* Expand an expression EXP that calls a built-in function,
6137 with result going to TARGET if that's convenient
6138 (and in mode MODE if that's convenient).
6139 SUBTARGET may be used as the target for computing one of EXP's operands.
6140 IGNORE is nonzero if the value is to be ignored. */
6143 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6146 tree fndecl = get_callee_fndecl (exp);
6147 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6148 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6150 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6151 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6153 /* When not optimizing, generate calls to library functions for a certain
6156 && !called_as_built_in (fndecl)
6157 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6158 && fcode != BUILT_IN_ALLOCA
6159 && fcode != BUILT_IN_FREE)
6160 return expand_call (exp, target, ignore);
6162 /* The built-in function expanders test for target == const0_rtx
6163 to determine whether the function's result will be ignored. */
6165 target = const0_rtx;
6167 /* If the result of a pure or const built-in function is ignored, and
6168 none of its arguments are volatile, we can avoid expanding the
6169 built-in call and just evaluate the arguments for side-effects. */
6170 if (target == const0_rtx
6171 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6173 bool volatilep = false;
6175 call_expr_arg_iterator iter;
6177 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6178 if (TREE_THIS_VOLATILE (arg))
6186 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6187 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6194 CASE_FLT_FN (BUILT_IN_FABS):
6195 target = expand_builtin_fabs (exp, target, subtarget);
6200 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6201 target = expand_builtin_copysign (exp, target, subtarget);
6206 /* Just do a normal library call if we were unable to fold
6208 CASE_FLT_FN (BUILT_IN_CABS):
6211 CASE_FLT_FN (BUILT_IN_EXP):
6212 CASE_FLT_FN (BUILT_IN_EXP10):
6213 CASE_FLT_FN (BUILT_IN_POW10):
6214 CASE_FLT_FN (BUILT_IN_EXP2):
6215 CASE_FLT_FN (BUILT_IN_EXPM1):
6216 CASE_FLT_FN (BUILT_IN_LOGB):
6217 CASE_FLT_FN (BUILT_IN_LOG):
6218 CASE_FLT_FN (BUILT_IN_LOG10):
6219 CASE_FLT_FN (BUILT_IN_LOG2):
6220 CASE_FLT_FN (BUILT_IN_LOG1P):
6221 CASE_FLT_FN (BUILT_IN_TAN):
6222 CASE_FLT_FN (BUILT_IN_ASIN):
6223 CASE_FLT_FN (BUILT_IN_ACOS):
6224 CASE_FLT_FN (BUILT_IN_ATAN):
6225 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6226 because of possible accuracy problems. */
6227 if (! flag_unsafe_math_optimizations)
6229 CASE_FLT_FN (BUILT_IN_SQRT):
6230 CASE_FLT_FN (BUILT_IN_FLOOR):
6231 CASE_FLT_FN (BUILT_IN_CEIL):
6232 CASE_FLT_FN (BUILT_IN_TRUNC):
6233 CASE_FLT_FN (BUILT_IN_ROUND):
6234 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6235 CASE_FLT_FN (BUILT_IN_RINT):
6236 target = expand_builtin_mathfn (exp, target, subtarget);
6241 CASE_FLT_FN (BUILT_IN_ILOGB):
6242 if (! flag_unsafe_math_optimizations)
6244 CASE_FLT_FN (BUILT_IN_ISINF):
6245 CASE_FLT_FN (BUILT_IN_FINITE):
6246 case BUILT_IN_ISFINITE:
6247 case BUILT_IN_ISNORMAL:
6248 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6253 CASE_FLT_FN (BUILT_IN_LCEIL):
6254 CASE_FLT_FN (BUILT_IN_LLCEIL):
6255 CASE_FLT_FN (BUILT_IN_LFLOOR):
6256 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6257 target = expand_builtin_int_roundingfn (exp, target);
6262 CASE_FLT_FN (BUILT_IN_LRINT):
6263 CASE_FLT_FN (BUILT_IN_LLRINT):
6264 CASE_FLT_FN (BUILT_IN_LROUND):
6265 CASE_FLT_FN (BUILT_IN_LLROUND):
6266 target = expand_builtin_int_roundingfn_2 (exp, target);
6271 CASE_FLT_FN (BUILT_IN_POW):
6272 target = expand_builtin_pow (exp, target, subtarget);
6277 CASE_FLT_FN (BUILT_IN_POWI):
6278 target = expand_builtin_powi (exp, target, subtarget);
6283 CASE_FLT_FN (BUILT_IN_ATAN2):
6284 CASE_FLT_FN (BUILT_IN_LDEXP):
6285 CASE_FLT_FN (BUILT_IN_SCALB):
6286 CASE_FLT_FN (BUILT_IN_SCALBN):
6287 CASE_FLT_FN (BUILT_IN_SCALBLN):
6288 if (! flag_unsafe_math_optimizations)
6291 CASE_FLT_FN (BUILT_IN_FMOD):
6292 CASE_FLT_FN (BUILT_IN_REMAINDER):
6293 CASE_FLT_FN (BUILT_IN_DREM):
6294 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6299 CASE_FLT_FN (BUILT_IN_CEXPI):
6300 target = expand_builtin_cexpi (exp, target, subtarget);
6301 gcc_assert (target);
6304 CASE_FLT_FN (BUILT_IN_SIN):
6305 CASE_FLT_FN (BUILT_IN_COS):
6306 if (! flag_unsafe_math_optimizations)
6308 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6313 CASE_FLT_FN (BUILT_IN_SINCOS):
6314 if (! flag_unsafe_math_optimizations)
6316 target = expand_builtin_sincos (exp);
6321 case BUILT_IN_APPLY_ARGS:
6322 return expand_builtin_apply_args ();
6324 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6325 FUNCTION with a copy of the parameters described by
6326 ARGUMENTS, and ARGSIZE. It returns a block of memory
6327 allocated on the stack into which is stored all the registers
6328 that might possibly be used for returning the result of a
6329 function. ARGUMENTS is the value returned by
6330 __builtin_apply_args. ARGSIZE is the number of bytes of
6331 arguments that must be copied. ??? How should this value be
6332 computed? We'll also need a safe worst case value for varargs
6334 case BUILT_IN_APPLY:
6335 if (!validate_arglist (exp, POINTER_TYPE,
6336 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6337 && !validate_arglist (exp, REFERENCE_TYPE,
6338 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6344 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6345 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6346 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6348 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6351 /* __builtin_return (RESULT) causes the function to return the
6352 value described by RESULT. RESULT is address of the block of
6353 memory returned by __builtin_apply. */
6354 case BUILT_IN_RETURN:
6355 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6356 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6359 case BUILT_IN_SAVEREGS:
6360 return expand_builtin_saveregs ();
6362 case BUILT_IN_ARGS_INFO:
6363 return expand_builtin_args_info (exp);
6365 case BUILT_IN_VA_ARG_PACK:
6366 /* All valid uses of __builtin_va_arg_pack () are removed during
6368 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6371 case BUILT_IN_VA_ARG_PACK_LEN:
6372 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6374 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6377 /* Return the address of the first anonymous stack arg. */
6378 case BUILT_IN_NEXT_ARG:
6379 if (fold_builtin_next_arg (exp, false))
6381 return expand_builtin_next_arg ();
6383 case BUILT_IN_CLEAR_CACHE:
6384 target = expand_builtin___clear_cache (exp);
6389 case BUILT_IN_CLASSIFY_TYPE:
6390 return expand_builtin_classify_type (exp);
6392 case BUILT_IN_CONSTANT_P:
6395 case BUILT_IN_FRAME_ADDRESS:
6396 case BUILT_IN_RETURN_ADDRESS:
6397 return expand_builtin_frame_address (fndecl, exp);
6399 /* Returns the address of the area where the structure is returned.
6401 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6402 if (call_expr_nargs (exp) != 0
6403 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6404 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6407 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6409 case BUILT_IN_ALLOCA:
6410 target = expand_builtin_alloca (exp, target);
6415 case BUILT_IN_STACK_SAVE:
6416 return expand_stack_save ();
6418 case BUILT_IN_STACK_RESTORE:
6419 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6422 case BUILT_IN_BSWAP32:
6423 case BUILT_IN_BSWAP64:
6424 target = expand_builtin_bswap (exp, target, subtarget);
6430 CASE_INT_FN (BUILT_IN_FFS):
6431 case BUILT_IN_FFSIMAX:
6432 target = expand_builtin_unop (target_mode, exp, target,
6433 subtarget, ffs_optab);
6438 CASE_INT_FN (BUILT_IN_CLZ):
6439 case BUILT_IN_CLZIMAX:
6440 target = expand_builtin_unop (target_mode, exp, target,
6441 subtarget, clz_optab);
6446 CASE_INT_FN (BUILT_IN_CTZ):
6447 case BUILT_IN_CTZIMAX:
6448 target = expand_builtin_unop (target_mode, exp, target,
6449 subtarget, ctz_optab);
6454 CASE_INT_FN (BUILT_IN_POPCOUNT):
6455 case BUILT_IN_POPCOUNTIMAX:
6456 target = expand_builtin_unop (target_mode, exp, target,
6457 subtarget, popcount_optab);
6462 CASE_INT_FN (BUILT_IN_PARITY):
6463 case BUILT_IN_PARITYIMAX:
6464 target = expand_builtin_unop (target_mode, exp, target,
6465 subtarget, parity_optab);
6470 case BUILT_IN_STRLEN:
6471 target = expand_builtin_strlen (exp, target, target_mode);
6476 case BUILT_IN_STRCPY:
6477 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6482 case BUILT_IN_STRNCPY:
6483 target = expand_builtin_strncpy (exp, target, mode);
6488 case BUILT_IN_STPCPY:
6489 target = expand_builtin_stpcpy (exp, target, mode);
6494 case BUILT_IN_STRCAT:
6495 target = expand_builtin_strcat (fndecl, exp, target, mode);
6500 case BUILT_IN_STRNCAT:
6501 target = expand_builtin_strncat (exp, target, mode);
6506 case BUILT_IN_STRSPN:
6507 target = expand_builtin_strspn (exp, target, mode);
6512 case BUILT_IN_STRCSPN:
6513 target = expand_builtin_strcspn (exp, target, mode);
6518 case BUILT_IN_STRSTR:
6519 target = expand_builtin_strstr (exp, target, mode);
6524 case BUILT_IN_STRPBRK:
6525 target = expand_builtin_strpbrk (exp, target, mode);
6530 case BUILT_IN_INDEX:
6531 case BUILT_IN_STRCHR:
6532 target = expand_builtin_strchr (exp, target, mode);
6537 case BUILT_IN_RINDEX:
6538 case BUILT_IN_STRRCHR:
6539 target = expand_builtin_strrchr (exp, target, mode);
6544 case BUILT_IN_MEMCPY:
6545 target = expand_builtin_memcpy (exp, target, mode);
6550 case BUILT_IN_MEMPCPY:
6551 target = expand_builtin_mempcpy (exp, target, mode);
6556 case BUILT_IN_MEMMOVE:
6557 target = expand_builtin_memmove (exp, target, mode, ignore);
6562 case BUILT_IN_BCOPY:
6563 target = expand_builtin_bcopy (exp, ignore);
6568 case BUILT_IN_MEMSET:
6569 target = expand_builtin_memset (exp, target, mode);
6574 case BUILT_IN_BZERO:
6575 target = expand_builtin_bzero (exp);
6580 case BUILT_IN_STRCMP:
6581 target = expand_builtin_strcmp (exp, target, mode);
6586 case BUILT_IN_STRNCMP:
6587 target = expand_builtin_strncmp (exp, target, mode);
6592 case BUILT_IN_MEMCHR:
6593 target = expand_builtin_memchr (exp, target, mode);
6599 case BUILT_IN_MEMCMP:
6600 target = expand_builtin_memcmp (exp, target, mode);
6605 case BUILT_IN_SETJMP:
6606 /* This should have been lowered to the builtins below. */
6609 case BUILT_IN_SETJMP_SETUP:
6610 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6611 and the receiver label. */
6612 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6614 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6615 VOIDmode, EXPAND_NORMAL);
6616 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6617 rtx label_r = label_rtx (label);
6619 /* This is copied from the handling of non-local gotos. */
6620 expand_builtin_setjmp_setup (buf_addr, label_r);
6621 nonlocal_goto_handler_labels
6622 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6623 nonlocal_goto_handler_labels);
6624 /* ??? Do not let expand_label treat us as such since we would
6625 not want to be both on the list of non-local labels and on
6626 the list of forced labels. */
6627 FORCED_LABEL (label) = 0;
6632 case BUILT_IN_SETJMP_DISPATCHER:
6633 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6634 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6636 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6637 rtx label_r = label_rtx (label);
6639 /* Remove the dispatcher label from the list of non-local labels
6640 since the receiver labels have been added to it above. */
6641 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6646 case BUILT_IN_SETJMP_RECEIVER:
6647 /* __builtin_setjmp_receiver is passed the receiver label. */
6648 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6650 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6651 rtx label_r = label_rtx (label);
6653 expand_builtin_setjmp_receiver (label_r);
6658 /* __builtin_longjmp is passed a pointer to an array of five words.
6659 It's similar to the C library longjmp function but works with
6660 __builtin_setjmp above. */
6661 case BUILT_IN_LONGJMP:
6662 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6664 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6665 VOIDmode, EXPAND_NORMAL);
6666 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6668 if (value != const1_rtx)
6670 error ("%<__builtin_longjmp%> second argument must be 1");
6674 expand_builtin_longjmp (buf_addr, value);
6679 case BUILT_IN_NONLOCAL_GOTO:
6680 target = expand_builtin_nonlocal_goto (exp);
6685 /* This updates the setjmp buffer that is its argument with the value
6686 of the current stack pointer. */
6687 case BUILT_IN_UPDATE_SETJMP_BUF:
6688 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6691 = expand_normal (CALL_EXPR_ARG (exp, 0));
6693 expand_builtin_update_setjmp_buf (buf_addr);
6699 expand_builtin_trap ();
6702 case BUILT_IN_PRINTF:
6703 target = expand_builtin_printf (exp, target, mode, false);
6708 case BUILT_IN_PRINTF_UNLOCKED:
6709 target = expand_builtin_printf (exp, target, mode, true);
6714 case BUILT_IN_FPUTS:
6715 target = expand_builtin_fputs (exp, target, false);
6719 case BUILT_IN_FPUTS_UNLOCKED:
6720 target = expand_builtin_fputs (exp, target, true);
6725 case BUILT_IN_FPRINTF:
6726 target = expand_builtin_fprintf (exp, target, mode, false);
6731 case BUILT_IN_FPRINTF_UNLOCKED:
6732 target = expand_builtin_fprintf (exp, target, mode, true);
6737 case BUILT_IN_SPRINTF:
6738 target = expand_builtin_sprintf (exp, target, mode);
6743 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6744 case BUILT_IN_SIGNBITD32:
6745 case BUILT_IN_SIGNBITD64:
6746 case BUILT_IN_SIGNBITD128:
6747 target = expand_builtin_signbit (exp, target);
6752 /* Various hooks for the DWARF 2 __throw routine. */
6753 case BUILT_IN_UNWIND_INIT:
6754 expand_builtin_unwind_init ();
6756 case BUILT_IN_DWARF_CFA:
6757 return virtual_cfa_rtx;
6758 #ifdef DWARF2_UNWIND_INFO
6759 case BUILT_IN_DWARF_SP_COLUMN:
6760 return expand_builtin_dwarf_sp_column ();
6761 case BUILT_IN_INIT_DWARF_REG_SIZES:
6762 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6765 case BUILT_IN_FROB_RETURN_ADDR:
6766 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6767 case BUILT_IN_EXTRACT_RETURN_ADDR:
6768 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6769 case BUILT_IN_EH_RETURN:
6770 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6771 CALL_EXPR_ARG (exp, 1));
6773 #ifdef EH_RETURN_DATA_REGNO
6774 case BUILT_IN_EH_RETURN_DATA_REGNO:
6775 return expand_builtin_eh_return_data_regno (exp);
6777 case BUILT_IN_EXTEND_POINTER:
6778 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6780 case BUILT_IN_VA_START:
6781 return expand_builtin_va_start (exp);
6782 case BUILT_IN_VA_END:
6783 return expand_builtin_va_end (exp);
6784 case BUILT_IN_VA_COPY:
6785 return expand_builtin_va_copy (exp);
6786 case BUILT_IN_EXPECT:
6787 return expand_builtin_expect (exp, target);
6788 case BUILT_IN_PREFETCH:
6789 expand_builtin_prefetch (exp);
6792 case BUILT_IN_PROFILE_FUNC_ENTER:
6793 return expand_builtin_profile_func (false);
6794 case BUILT_IN_PROFILE_FUNC_EXIT:
6795 return expand_builtin_profile_func (true);
6797 case BUILT_IN_INIT_TRAMPOLINE:
6798 return expand_builtin_init_trampoline (exp);
6799 case BUILT_IN_ADJUST_TRAMPOLINE:
6800 return expand_builtin_adjust_trampoline (exp);
6803 case BUILT_IN_EXECL:
6804 case BUILT_IN_EXECV:
6805 case BUILT_IN_EXECLP:
6806 case BUILT_IN_EXECLE:
6807 case BUILT_IN_EXECVP:
6808 case BUILT_IN_EXECVE:
6809 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6814 case BUILT_IN_FETCH_AND_ADD_1:
6815 case BUILT_IN_FETCH_AND_ADD_2:
6816 case BUILT_IN_FETCH_AND_ADD_4:
6817 case BUILT_IN_FETCH_AND_ADD_8:
6818 case BUILT_IN_FETCH_AND_ADD_16:
6819 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6820 target = expand_builtin_sync_operation (mode, exp, PLUS,
6821 false, target, ignore);
6826 case BUILT_IN_FETCH_AND_SUB_1:
6827 case BUILT_IN_FETCH_AND_SUB_2:
6828 case BUILT_IN_FETCH_AND_SUB_4:
6829 case BUILT_IN_FETCH_AND_SUB_8:
6830 case BUILT_IN_FETCH_AND_SUB_16:
6831 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6832 target = expand_builtin_sync_operation (mode, exp, MINUS,
6833 false, target, ignore);
6838 case BUILT_IN_FETCH_AND_OR_1:
6839 case BUILT_IN_FETCH_AND_OR_2:
6840 case BUILT_IN_FETCH_AND_OR_4:
6841 case BUILT_IN_FETCH_AND_OR_8:
6842 case BUILT_IN_FETCH_AND_OR_16:
6843 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6844 target = expand_builtin_sync_operation (mode, exp, IOR,
6845 false, target, ignore);
6850 case BUILT_IN_FETCH_AND_AND_1:
6851 case BUILT_IN_FETCH_AND_AND_2:
6852 case BUILT_IN_FETCH_AND_AND_4:
6853 case BUILT_IN_FETCH_AND_AND_8:
6854 case BUILT_IN_FETCH_AND_AND_16:
6855 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6856 target = expand_builtin_sync_operation (mode, exp, AND,
6857 false, target, ignore);
6862 case BUILT_IN_FETCH_AND_XOR_1:
6863 case BUILT_IN_FETCH_AND_XOR_2:
6864 case BUILT_IN_FETCH_AND_XOR_4:
6865 case BUILT_IN_FETCH_AND_XOR_8:
6866 case BUILT_IN_FETCH_AND_XOR_16:
6867 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6868 target = expand_builtin_sync_operation (mode, exp, XOR,
6869 false, target, ignore);
6874 case BUILT_IN_FETCH_AND_NAND_1:
6875 case BUILT_IN_FETCH_AND_NAND_2:
6876 case BUILT_IN_FETCH_AND_NAND_4:
6877 case BUILT_IN_FETCH_AND_NAND_8:
6878 case BUILT_IN_FETCH_AND_NAND_16:
6879 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6880 target = expand_builtin_sync_operation (mode, exp, NOT,
6881 false, target, ignore);
6886 case BUILT_IN_ADD_AND_FETCH_1:
6887 case BUILT_IN_ADD_AND_FETCH_2:
6888 case BUILT_IN_ADD_AND_FETCH_4:
6889 case BUILT_IN_ADD_AND_FETCH_8:
6890 case BUILT_IN_ADD_AND_FETCH_16:
6891 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6892 target = expand_builtin_sync_operation (mode, exp, PLUS,
6893 true, target, ignore);
6898 case BUILT_IN_SUB_AND_FETCH_1:
6899 case BUILT_IN_SUB_AND_FETCH_2:
6900 case BUILT_IN_SUB_AND_FETCH_4:
6901 case BUILT_IN_SUB_AND_FETCH_8:
6902 case BUILT_IN_SUB_AND_FETCH_16:
6903 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6904 target = expand_builtin_sync_operation (mode, exp, MINUS,
6905 true, target, ignore);
6910 case BUILT_IN_OR_AND_FETCH_1:
6911 case BUILT_IN_OR_AND_FETCH_2:
6912 case BUILT_IN_OR_AND_FETCH_4:
6913 case BUILT_IN_OR_AND_FETCH_8:
6914 case BUILT_IN_OR_AND_FETCH_16:
6915 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6916 target = expand_builtin_sync_operation (mode, exp, IOR,
6917 true, target, ignore);
6922 case BUILT_IN_AND_AND_FETCH_1:
6923 case BUILT_IN_AND_AND_FETCH_2:
6924 case BUILT_IN_AND_AND_FETCH_4:
6925 case BUILT_IN_AND_AND_FETCH_8:
6926 case BUILT_IN_AND_AND_FETCH_16:
6927 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6928 target = expand_builtin_sync_operation (mode, exp, AND,
6929 true, target, ignore);
6934 case BUILT_IN_XOR_AND_FETCH_1:
6935 case BUILT_IN_XOR_AND_FETCH_2:
6936 case BUILT_IN_XOR_AND_FETCH_4:
6937 case BUILT_IN_XOR_AND_FETCH_8:
6938 case BUILT_IN_XOR_AND_FETCH_16:
6939 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6940 target = expand_builtin_sync_operation (mode, exp, XOR,
6941 true, target, ignore);
6946 case BUILT_IN_NAND_AND_FETCH_1:
6947 case BUILT_IN_NAND_AND_FETCH_2:
6948 case BUILT_IN_NAND_AND_FETCH_4:
6949 case BUILT_IN_NAND_AND_FETCH_8:
6950 case BUILT_IN_NAND_AND_FETCH_16:
6951 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6952 target = expand_builtin_sync_operation (mode, exp, NOT,
6953 true, target, ignore);
6958 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6959 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6960 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6961 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6962 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6963 if (mode == VOIDmode)
6964 mode = TYPE_MODE (boolean_type_node);
6965 if (!target || !register_operand (target, mode))
6966 target = gen_reg_rtx (mode);
6968 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6969 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6974 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6975 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6976 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6977 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6978 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6979 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6980 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6985 case BUILT_IN_LOCK_TEST_AND_SET_1:
6986 case BUILT_IN_LOCK_TEST_AND_SET_2:
6987 case BUILT_IN_LOCK_TEST_AND_SET_4:
6988 case BUILT_IN_LOCK_TEST_AND_SET_8:
6989 case BUILT_IN_LOCK_TEST_AND_SET_16:
6990 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6991 target = expand_builtin_lock_test_and_set (mode, exp, target);
6996 case BUILT_IN_LOCK_RELEASE_1:
6997 case BUILT_IN_LOCK_RELEASE_2:
6998 case BUILT_IN_LOCK_RELEASE_4:
6999 case BUILT_IN_LOCK_RELEASE_8:
7000 case BUILT_IN_LOCK_RELEASE_16:
7001 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7002 expand_builtin_lock_release (mode, exp);
7005 case BUILT_IN_SYNCHRONIZE:
7006 expand_builtin_synchronize ();
7009 case BUILT_IN_OBJECT_SIZE:
7010 return expand_builtin_object_size (exp);
7012 case BUILT_IN_MEMCPY_CHK:
7013 case BUILT_IN_MEMPCPY_CHK:
7014 case BUILT_IN_MEMMOVE_CHK:
7015 case BUILT_IN_MEMSET_CHK:
7016 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7021 case BUILT_IN_STRCPY_CHK:
7022 case BUILT_IN_STPCPY_CHK:
7023 case BUILT_IN_STRNCPY_CHK:
7024 case BUILT_IN_STRCAT_CHK:
7025 case BUILT_IN_STRNCAT_CHK:
7026 case BUILT_IN_SNPRINTF_CHK:
7027 case BUILT_IN_VSNPRINTF_CHK:
7028 maybe_emit_chk_warning (exp, fcode);
7031 case BUILT_IN_SPRINTF_CHK:
7032 case BUILT_IN_VSPRINTF_CHK:
7033 maybe_emit_sprintf_chk_warning (exp, fcode);
7037 maybe_emit_free_warning (exp);
7040 default: /* just do library call, if unknown builtin */
7044 /* The switch statement above can drop through to cause the function
7045 to be called normally. */
7046 return expand_call (exp, target, ignore);
7049 /* Determine whether a tree node represents a call to a built-in
7050 function. If the tree T is a call to a built-in function with
7051 the right number of arguments of the appropriate types, return
7052 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7053 Otherwise the return value is END_BUILTINS. */
7055 enum built_in_function
7056 builtin_mathfn_code (const_tree t)
7058 const_tree fndecl, arg, parmlist;
7059 const_tree argtype, parmtype;
7060 const_call_expr_arg_iterator iter;
7062 if (TREE_CODE (t) != CALL_EXPR
7063 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7064 return END_BUILTINS;
7066 fndecl = get_callee_fndecl (t);
7067 if (fndecl == NULL_TREE
7068 || TREE_CODE (fndecl) != FUNCTION_DECL
7069 || ! DECL_BUILT_IN (fndecl)
7070 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7071 return END_BUILTINS;
7073 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7074 init_const_call_expr_arg_iterator (t, &iter);
7075 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7077 /* If a function doesn't take a variable number of arguments,
7078 the last element in the list will have type `void'. */
7079 parmtype = TREE_VALUE (parmlist);
7080 if (VOID_TYPE_P (parmtype))
7082 if (more_const_call_expr_args_p (&iter))
7083 return END_BUILTINS;
7084 return DECL_FUNCTION_CODE (fndecl);
7087 if (! more_const_call_expr_args_p (&iter))
7088 return END_BUILTINS;
7090 arg = next_const_call_expr_arg (&iter);
7091 argtype = TREE_TYPE (arg);
7093 if (SCALAR_FLOAT_TYPE_P (parmtype))
7095 if (! SCALAR_FLOAT_TYPE_P (argtype))
7096 return END_BUILTINS;
7098 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7100 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7101 return END_BUILTINS;
7103 else if (POINTER_TYPE_P (parmtype))
7105 if (! POINTER_TYPE_P (argtype))
7106 return END_BUILTINS;
7108 else if (INTEGRAL_TYPE_P (parmtype))
7110 if (! INTEGRAL_TYPE_P (argtype))
7111 return END_BUILTINS;
7114 return END_BUILTINS;
7117 /* Variable-length argument list. */
7118 return DECL_FUNCTION_CODE (fndecl);
7121 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7122 evaluate to a constant. */
7125 fold_builtin_constant_p (tree arg)
7127 /* We return 1 for a numeric type that's known to be a constant
7128 value at compile-time or for an aggregate type that's a
7129 literal constant. */
7132 /* If we know this is a constant, emit the constant of one. */
7133 if (CONSTANT_CLASS_P (arg)
7134 || (TREE_CODE (arg) == CONSTRUCTOR
7135 && TREE_CONSTANT (arg)))
7136 return integer_one_node;
7137 if (TREE_CODE (arg) == ADDR_EXPR)
7139 tree op = TREE_OPERAND (arg, 0);
7140 if (TREE_CODE (op) == STRING_CST
7141 || (TREE_CODE (op) == ARRAY_REF
7142 && integer_zerop (TREE_OPERAND (op, 1))
7143 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7144 return integer_one_node;
7147 /* If this expression has side effects, show we don't know it to be a
7148 constant. Likewise if it's a pointer or aggregate type since in
7149 those case we only want literals, since those are only optimized
7150 when generating RTL, not later.
7151 And finally, if we are compiling an initializer, not code, we
7152 need to return a definite result now; there's not going to be any
7153 more optimization done. */
7154 if (TREE_SIDE_EFFECTS (arg)
7155 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7156 || POINTER_TYPE_P (TREE_TYPE (arg))
7158 || folding_initializer)
7159 return integer_zero_node;
7164 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7165 return it as a truthvalue. */
7168 build_builtin_expect_predicate (tree pred, tree expected)
7170 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7172 fn = built_in_decls[BUILT_IN_EXPECT];
7173 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7174 ret_type = TREE_TYPE (TREE_TYPE (fn));
7175 pred_type = TREE_VALUE (arg_types);
7176 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7178 pred = fold_convert (pred_type, pred);
7179 expected = fold_convert (expected_type, expected);
7180 call_expr = build_call_expr (fn, 2, pred, expected);
7182 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7183 build_int_cst (ret_type, 0));
7186 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7187 NULL_TREE if no simplification is possible. */
7190 fold_builtin_expect (tree arg0, tree arg1)
7193 enum tree_code code;
7195 /* If this is a builtin_expect within a builtin_expect keep the
7196 inner one. See through a comparison against a constant. It
7197 might have been added to create a thruthvalue. */
7199 if (COMPARISON_CLASS_P (inner)
7200 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7201 inner = TREE_OPERAND (inner, 0);
7203 if (TREE_CODE (inner) == CALL_EXPR
7204 && (fndecl = get_callee_fndecl (inner))
7205 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7206 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7209 /* Distribute the expected value over short-circuiting operators.
7210 See through the cast from truthvalue_type_node to long. */
7212 while (TREE_CODE (inner) == NOP_EXPR
7213 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7214 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7215 inner = TREE_OPERAND (inner, 0);
7217 code = TREE_CODE (inner);
7218 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7220 tree op0 = TREE_OPERAND (inner, 0);
7221 tree op1 = TREE_OPERAND (inner, 1);
7223 op0 = build_builtin_expect_predicate (op0, arg1);
7224 op1 = build_builtin_expect_predicate (op1, arg1);
7225 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7227 return fold_convert (TREE_TYPE (arg0), inner);
7230 /* If the argument isn't invariant then there's nothing else we can do. */
7231 if (!TREE_CONSTANT (arg0))
7234 /* If we expect that a comparison against the argument will fold to
7235 a constant return the constant. In practice, this means a true
7236 constant or the address of a non-weak symbol. */
7239 if (TREE_CODE (inner) == ADDR_EXPR)
7243 inner = TREE_OPERAND (inner, 0);
7245 while (TREE_CODE (inner) == COMPONENT_REF
7246 || TREE_CODE (inner) == ARRAY_REF);
7247 if (DECL_P (inner) && DECL_WEAK (inner))
7251 /* Otherwise, ARG0 already has the proper type for the return value. */
7255 /* Fold a call to __builtin_classify_type with argument ARG. */
7258 fold_builtin_classify_type (tree arg)
7261 return build_int_cst (NULL_TREE, no_type_class);
7263 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7266 /* Fold a call to __builtin_strlen with argument ARG. */
7269 fold_builtin_strlen (tree arg)
7271 if (!validate_arg (arg, POINTER_TYPE))
7275 tree len = c_strlen (arg, 0);
7279 /* Convert from the internal "sizetype" type to "size_t". */
7281 len = fold_convert (size_type_node, len);
7289 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7292 fold_builtin_inf (tree type, int warn)
7294 REAL_VALUE_TYPE real;
7296 /* __builtin_inff is intended to be usable to define INFINITY on all
7297 targets. If an infinity is not available, INFINITY expands "to a
7298 positive constant of type float that overflows at translation
7299 time", footnote "In this case, using INFINITY will violate the
7300 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7301 Thus we pedwarn to ensure this constraint violation is
7303 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7304 pedwarn (input_location, 0, "target format does not support infinity");
7307 return build_real (type, real);
7310 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7313 fold_builtin_nan (tree arg, tree type, int quiet)
7315 REAL_VALUE_TYPE real;
7318 if (!validate_arg (arg, POINTER_TYPE))
7320 str = c_getstr (arg);
7324 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7327 return build_real (type, real);
7330 /* Return true if the floating point expression T has an integer value.
7331 We also allow +Inf, -Inf and NaN to be considered integer values. */
7334 integer_valued_real_p (tree t)
7336 switch (TREE_CODE (t))
7343 return integer_valued_real_p (TREE_OPERAND (t, 0));
7348 return integer_valued_real_p (TREE_OPERAND (t, 1));
7355 return integer_valued_real_p (TREE_OPERAND (t, 0))
7356 && integer_valued_real_p (TREE_OPERAND (t, 1));
7359 return integer_valued_real_p (TREE_OPERAND (t, 1))
7360 && integer_valued_real_p (TREE_OPERAND (t, 2));
7363 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7367 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7368 if (TREE_CODE (type) == INTEGER_TYPE)
7370 if (TREE_CODE (type) == REAL_TYPE)
7371 return integer_valued_real_p (TREE_OPERAND (t, 0));
7376 switch (builtin_mathfn_code (t))
7378 CASE_FLT_FN (BUILT_IN_CEIL):
7379 CASE_FLT_FN (BUILT_IN_FLOOR):
7380 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7381 CASE_FLT_FN (BUILT_IN_RINT):
7382 CASE_FLT_FN (BUILT_IN_ROUND):
7383 CASE_FLT_FN (BUILT_IN_TRUNC):
7386 CASE_FLT_FN (BUILT_IN_FMIN):
7387 CASE_FLT_FN (BUILT_IN_FMAX):
7388 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7389 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7402 /* FNDECL is assumed to be a builtin where truncation can be propagated
7403 across (for instance floor((double)f) == (double)floorf (f).
7404 Do the transformation for a call with argument ARG. */
7407 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7409 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7411 if (!validate_arg (arg, REAL_TYPE))
7414 /* Integer rounding functions are idempotent. */
7415 if (fcode == builtin_mathfn_code (arg))
7418 /* If argument is already integer valued, and we don't need to worry
7419 about setting errno, there's no need to perform rounding. */
7420 if (! flag_errno_math && integer_valued_real_p (arg))
7425 tree arg0 = strip_float_extensions (arg);
7426 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7427 tree newtype = TREE_TYPE (arg0);
7430 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7431 && (decl = mathfn_built_in (newtype, fcode)))
7432 return fold_convert (ftype,
7433 build_call_expr (decl, 1,
7434 fold_convert (newtype, arg0)));
7439 /* FNDECL is assumed to be builtin which can narrow the FP type of
7440 the argument, for instance lround((double)f) -> lroundf (f).
7441 Do the transformation for a call with argument ARG. */
7444 fold_fixed_mathfn (tree fndecl, tree arg)
7446 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7448 if (!validate_arg (arg, REAL_TYPE))
7451 /* If argument is already integer valued, and we don't need to worry
7452 about setting errno, there's no need to perform rounding. */
7453 if (! flag_errno_math && integer_valued_real_p (arg))
7454 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7458 tree ftype = TREE_TYPE (arg);
7459 tree arg0 = strip_float_extensions (arg);
7460 tree newtype = TREE_TYPE (arg0);
7463 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7464 && (decl = mathfn_built_in (newtype, fcode)))
7465 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7468 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7469 sizeof (long long) == sizeof (long). */
7470 if (TYPE_PRECISION (long_long_integer_type_node)
7471 == TYPE_PRECISION (long_integer_type_node))
7473 tree newfn = NULL_TREE;
7476 CASE_FLT_FN (BUILT_IN_LLCEIL):
7477 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7480 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7481 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7484 CASE_FLT_FN (BUILT_IN_LLROUND):
7485 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7488 CASE_FLT_FN (BUILT_IN_LLRINT):
7489 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7498 tree newcall = build_call_expr(newfn, 1, arg);
7499 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7506 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7507 return type. Return NULL_TREE if no simplification can be made. */
7510 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7514 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7515 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7518 /* Calculate the result when the argument is a constant. */
7519 if (TREE_CODE (arg) == COMPLEX_CST
7520 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7524 if (TREE_CODE (arg) == COMPLEX_EXPR)
7526 tree real = TREE_OPERAND (arg, 0);
7527 tree imag = TREE_OPERAND (arg, 1);
7529 /* If either part is zero, cabs is fabs of the other. */
7530 if (real_zerop (real))
7531 return fold_build1 (ABS_EXPR, type, imag);
7532 if (real_zerop (imag))
7533 return fold_build1 (ABS_EXPR, type, real);
7535 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7536 if (flag_unsafe_math_optimizations
7537 && operand_equal_p (real, imag, OEP_PURE_SAME))
7539 const REAL_VALUE_TYPE sqrt2_trunc
7540 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7542 return fold_build2 (MULT_EXPR, type,
7543 fold_build1 (ABS_EXPR, type, real),
7544 build_real (type, sqrt2_trunc));
7548 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7549 if (TREE_CODE (arg) == NEGATE_EXPR
7550 || TREE_CODE (arg) == CONJ_EXPR)
7551 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7553 /* Don't do this when optimizing for size. */
7554 if (flag_unsafe_math_optimizations
7555 && optimize && optimize_function_for_speed_p (cfun))
7557 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7559 if (sqrtfn != NULL_TREE)
7561 tree rpart, ipart, result;
7563 arg = builtin_save_expr (arg);
7565 rpart = fold_build1 (REALPART_EXPR, type, arg);
7566 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7568 rpart = builtin_save_expr (rpart);
7569 ipart = builtin_save_expr (ipart);
7571 result = fold_build2 (PLUS_EXPR, type,
7572 fold_build2 (MULT_EXPR, type,
7574 fold_build2 (MULT_EXPR, type,
7577 return build_call_expr (sqrtfn, 1, result);
7584 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7585 Return NULL_TREE if no simplification can be made. */
7588 fold_builtin_sqrt (tree arg, tree type)
7591 enum built_in_function fcode;
7594 if (!validate_arg (arg, REAL_TYPE))
7597 /* Calculate the result when the argument is a constant. */
7598 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7601 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7602 fcode = builtin_mathfn_code (arg);
7603 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7605 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7606 arg = fold_build2 (MULT_EXPR, type,
7607 CALL_EXPR_ARG (arg, 0),
7608 build_real (type, dconsthalf));
7609 return build_call_expr (expfn, 1, arg);
7612 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7613 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7615 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7619 tree arg0 = CALL_EXPR_ARG (arg, 0);
7621 /* The inner root was either sqrt or cbrt. */
7622 REAL_VALUE_TYPE dconstroot =
7623 BUILTIN_SQRT_P (fcode) ? dconsthalf : dconst_third ();
7625 /* Adjust for the outer root. */
7626 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7627 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7628 tree_root = build_real (type, dconstroot);
7629 return build_call_expr (powfn, 2, arg0, tree_root);
7633 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7634 if (flag_unsafe_math_optimizations
7635 && (fcode == BUILT_IN_POW
7636 || fcode == BUILT_IN_POWF
7637 || fcode == BUILT_IN_POWL))
7639 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7640 tree arg0 = CALL_EXPR_ARG (arg, 0);
7641 tree arg1 = CALL_EXPR_ARG (arg, 1);
7643 if (!tree_expr_nonnegative_p (arg0))
7644 arg0 = build1 (ABS_EXPR, type, arg0);
7645 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7646 build_real (type, dconsthalf));
7647 return build_call_expr (powfn, 2, arg0, narg1);
7653 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7654 Return NULL_TREE if no simplification can be made. */
7657 fold_builtin_cbrt (tree arg, tree type)
7659 const enum built_in_function fcode = builtin_mathfn_code (arg);
7662 if (!validate_arg (arg, REAL_TYPE))
7665 /* Calculate the result when the argument is a constant. */
7666 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7669 if (flag_unsafe_math_optimizations)
7671 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7672 if (BUILTIN_EXPONENT_P (fcode))
7674 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7675 const REAL_VALUE_TYPE third_trunc =
7676 real_value_truncate (TYPE_MODE (type), dconst_third ());
7677 arg = fold_build2 (MULT_EXPR, type,
7678 CALL_EXPR_ARG (arg, 0),
7679 build_real (type, third_trunc));
7680 return build_call_expr (expfn, 1, arg);
7683 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7684 if (BUILTIN_SQRT_P (fcode))
7686 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7690 tree arg0 = CALL_EXPR_ARG (arg, 0);
7692 REAL_VALUE_TYPE dconstroot = dconst_third ();
7694 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7695 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7696 tree_root = build_real (type, dconstroot);
7697 return build_call_expr (powfn, 2, arg0, tree_root);
7701 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7702 if (BUILTIN_CBRT_P (fcode))
7704 tree arg0 = CALL_EXPR_ARG (arg, 0);
7705 if (tree_expr_nonnegative_p (arg0))
7707 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7712 REAL_VALUE_TYPE dconstroot;
7714 real_arithmetic (&dconstroot, MULT_EXPR,
7715 dconst_third_ptr (), dconst_third_ptr ());
7716 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7717 tree_root = build_real (type, dconstroot);
7718 return build_call_expr (powfn, 2, arg0, tree_root);
7723 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7724 if (fcode == BUILT_IN_POW
7725 || fcode == BUILT_IN_POWF
7726 || fcode == BUILT_IN_POWL)
7728 tree arg00 = CALL_EXPR_ARG (arg, 0);
7729 tree arg01 = CALL_EXPR_ARG (arg, 1);
7730 if (tree_expr_nonnegative_p (arg00))
7732 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7733 const REAL_VALUE_TYPE dconstroot
7734 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7735 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7736 build_real (type, dconstroot));
7737 return build_call_expr (powfn, 2, arg00, narg01);
7744 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7745 TYPE is the type of the return value. Return NULL_TREE if no
7746 simplification can be made. */
7749 fold_builtin_cos (tree arg, tree type, tree fndecl)
7753 if (!validate_arg (arg, REAL_TYPE))
7756 /* Calculate the result when the argument is a constant. */
7757 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7760 /* Optimize cos(-x) into cos (x). */
7761 if ((narg = fold_strip_sign_ops (arg)))
7762 return build_call_expr (fndecl, 1, narg);
7767 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7768 Return NULL_TREE if no simplification can be made. */
7771 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7773 if (validate_arg (arg, REAL_TYPE))
7777 /* Calculate the result when the argument is a constant. */
7778 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7781 /* Optimize cosh(-x) into cosh (x). */
7782 if ((narg = fold_strip_sign_ops (arg)))
7783 return build_call_expr (fndecl, 1, narg);
7789 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7790 Return NULL_TREE if no simplification can be made. */
7793 fold_builtin_tan (tree arg, tree type)
7795 enum built_in_function fcode;
7798 if (!validate_arg (arg, REAL_TYPE))
7801 /* Calculate the result when the argument is a constant. */
7802 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7805 /* Optimize tan(atan(x)) = x. */
7806 fcode = builtin_mathfn_code (arg);
7807 if (flag_unsafe_math_optimizations
7808 && (fcode == BUILT_IN_ATAN
7809 || fcode == BUILT_IN_ATANF
7810 || fcode == BUILT_IN_ATANL))
7811 return CALL_EXPR_ARG (arg, 0);
7816 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7817 NULL_TREE if no simplification can be made. */
7820 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7825 if (!validate_arg (arg0, REAL_TYPE)
7826 || !validate_arg (arg1, POINTER_TYPE)
7827 || !validate_arg (arg2, POINTER_TYPE))
7830 type = TREE_TYPE (arg0);
7832 /* Calculate the result when the argument is a constant. */
7833 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7836 /* Canonicalize sincos to cexpi. */
7837 if (!TARGET_C99_FUNCTIONS)
7839 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7843 call = build_call_expr (fn, 1, arg0);
7844 call = builtin_save_expr (call);
7846 return build2 (COMPOUND_EXPR, type,
7847 build2 (MODIFY_EXPR, void_type_node,
7848 build_fold_indirect_ref (arg1),
7849 build1 (IMAGPART_EXPR, type, call)),
7850 build2 (MODIFY_EXPR, void_type_node,
7851 build_fold_indirect_ref (arg2),
7852 build1 (REALPART_EXPR, type, call)));
7855 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7856 NULL_TREE if no simplification can be made. */
7859 fold_builtin_cexp (tree arg0, tree type)
7862 tree realp, imagp, ifn;
7864 if (!validate_arg (arg0, COMPLEX_TYPE))
7867 rtype = TREE_TYPE (TREE_TYPE (arg0));
7869 /* In case we can figure out the real part of arg0 and it is constant zero
7871 if (!TARGET_C99_FUNCTIONS)
7873 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7877 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7878 && real_zerop (realp))
7880 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7881 return build_call_expr (ifn, 1, narg);
7884 /* In case we can easily decompose real and imaginary parts split cexp
7885 to exp (r) * cexpi (i). */
7886 if (flag_unsafe_math_optimizations
7889 tree rfn, rcall, icall;
7891 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7895 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7899 icall = build_call_expr (ifn, 1, imagp);
7900 icall = builtin_save_expr (icall);
7901 rcall = build_call_expr (rfn, 1, realp);
7902 rcall = builtin_save_expr (rcall);
7903 return fold_build2 (COMPLEX_EXPR, type,
7904 fold_build2 (MULT_EXPR, rtype,
7906 fold_build1 (REALPART_EXPR, rtype, icall)),
7907 fold_build2 (MULT_EXPR, rtype,
7909 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7915 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7916 Return NULL_TREE if no simplification can be made. */
7919 fold_builtin_trunc (tree fndecl, tree arg)
7921 if (!validate_arg (arg, REAL_TYPE))
7924 /* Optimize trunc of constant value. */
7925 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7927 REAL_VALUE_TYPE r, x;
7928 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7930 x = TREE_REAL_CST (arg);
7931 real_trunc (&r, TYPE_MODE (type), &x);
7932 return build_real (type, r);
7935 return fold_trunc_transparent_mathfn (fndecl, arg);
7938 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7939 Return NULL_TREE if no simplification can be made. */
7942 fold_builtin_floor (tree fndecl, tree arg)
7944 if (!validate_arg (arg, REAL_TYPE))
7947 /* Optimize floor of constant value. */
7948 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7952 x = TREE_REAL_CST (arg);
7953 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7955 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7958 real_floor (&r, TYPE_MODE (type), &x);
7959 return build_real (type, r);
7963 /* Fold floor (x) where x is nonnegative to trunc (x). */
7964 if (tree_expr_nonnegative_p (arg))
7966 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7968 return build_call_expr (truncfn, 1, arg);
7971 return fold_trunc_transparent_mathfn (fndecl, arg);
7974 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7975 Return NULL_TREE if no simplification can be made. */
7978 fold_builtin_ceil (tree fndecl, tree arg)
7980 if (!validate_arg (arg, REAL_TYPE))
7983 /* Optimize ceil of constant value. */
7984 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7988 x = TREE_REAL_CST (arg);
7989 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7991 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7994 real_ceil (&r, TYPE_MODE (type), &x);
7995 return build_real (type, r);
7999 return fold_trunc_transparent_mathfn (fndecl, arg);
8002 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8003 Return NULL_TREE if no simplification can be made. */
8006 fold_builtin_round (tree fndecl, tree arg)
8008 if (!validate_arg (arg, REAL_TYPE))
8011 /* Optimize round of constant value. */
8012 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8016 x = TREE_REAL_CST (arg);
8017 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8019 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8022 real_round (&r, TYPE_MODE (type), &x);
8023 return build_real (type, r);
8027 return fold_trunc_transparent_mathfn (fndecl, arg);
8030 /* Fold function call to builtin lround, lroundf or lroundl (or the
8031 corresponding long long versions) and other rounding functions. ARG
8032 is the argument to the call. Return NULL_TREE if no simplification
8036 fold_builtin_int_roundingfn (tree fndecl, tree arg)
8038 if (!validate_arg (arg, REAL_TYPE))
8041 /* Optimize lround of constant value. */
8042 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8044 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8046 if (real_isfinite (&x))
8048 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8049 tree ftype = TREE_TYPE (arg);
8050 unsigned HOST_WIDE_INT lo2;
8051 HOST_WIDE_INT hi, lo;
8054 switch (DECL_FUNCTION_CODE (fndecl))
8056 CASE_FLT_FN (BUILT_IN_LFLOOR):
8057 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8058 real_floor (&r, TYPE_MODE (ftype), &x);
8061 CASE_FLT_FN (BUILT_IN_LCEIL):
8062 CASE_FLT_FN (BUILT_IN_LLCEIL):
8063 real_ceil (&r, TYPE_MODE (ftype), &x);
8066 CASE_FLT_FN (BUILT_IN_LROUND):
8067 CASE_FLT_FN (BUILT_IN_LLROUND):
8068 real_round (&r, TYPE_MODE (ftype), &x);
8075 REAL_VALUE_TO_INT (&lo, &hi, r);
8076 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8077 return build_int_cst_wide (itype, lo2, hi);
8081 switch (DECL_FUNCTION_CODE (fndecl))
8083 CASE_FLT_FN (BUILT_IN_LFLOOR):
8084 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8085 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8086 if (tree_expr_nonnegative_p (arg))
8087 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8093 return fold_fixed_mathfn (fndecl, arg);
8096 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8097 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8098 the argument to the call. Return NULL_TREE if no simplification can
8102 fold_builtin_bitop (tree fndecl, tree arg)
8104 if (!validate_arg (arg, INTEGER_TYPE))
8107 /* Optimize for constant argument. */
8108 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8110 HOST_WIDE_INT hi, width, result;
8111 unsigned HOST_WIDE_INT lo;
8114 type = TREE_TYPE (arg);
8115 width = TYPE_PRECISION (type);
8116 lo = TREE_INT_CST_LOW (arg);
8118 /* Clear all the bits that are beyond the type's precision. */
8119 if (width > HOST_BITS_PER_WIDE_INT)
8121 hi = TREE_INT_CST_HIGH (arg);
8122 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8123 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8128 if (width < HOST_BITS_PER_WIDE_INT)
8129 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8132 switch (DECL_FUNCTION_CODE (fndecl))
8134 CASE_INT_FN (BUILT_IN_FFS):
8136 result = exact_log2 (lo & -lo) + 1;
8138 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8143 CASE_INT_FN (BUILT_IN_CLZ):
8145 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8147 result = width - floor_log2 (lo) - 1;
8148 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8152 CASE_INT_FN (BUILT_IN_CTZ):
8154 result = exact_log2 (lo & -lo);
8156 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8157 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8161 CASE_INT_FN (BUILT_IN_POPCOUNT):
8164 result++, lo &= lo - 1;
8166 result++, hi &= hi - 1;
8169 CASE_INT_FN (BUILT_IN_PARITY):
8172 result++, lo &= lo - 1;
8174 result++, hi &= hi - 1;
8182 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8188 /* Fold function call to builtin_bswap and the long and long long
8189 variants. Return NULL_TREE if no simplification can be made. */
8191 fold_builtin_bswap (tree fndecl, tree arg)
8193 if (! validate_arg (arg, INTEGER_TYPE))
8196 /* Optimize constant value. */
8197 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8199 HOST_WIDE_INT hi, width, r_hi = 0;
8200 unsigned HOST_WIDE_INT lo, r_lo = 0;
8203 type = TREE_TYPE (arg);
8204 width = TYPE_PRECISION (type);
8205 lo = TREE_INT_CST_LOW (arg);
8206 hi = TREE_INT_CST_HIGH (arg);
8208 switch (DECL_FUNCTION_CODE (fndecl))
8210 case BUILT_IN_BSWAP32:
8211 case BUILT_IN_BSWAP64:
8215 for (s = 0; s < width; s += 8)
8217 int d = width - s - 8;
8218 unsigned HOST_WIDE_INT byte;
8220 if (s < HOST_BITS_PER_WIDE_INT)
8221 byte = (lo >> s) & 0xff;
8223 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8225 if (d < HOST_BITS_PER_WIDE_INT)
8228 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8238 if (width < HOST_BITS_PER_WIDE_INT)
8239 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8241 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8247 /* Return true if EXPR is the real constant contained in VALUE. */
8250 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8254 return ((TREE_CODE (expr) == REAL_CST
8255 && !TREE_OVERFLOW (expr)
8256 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8257 || (TREE_CODE (expr) == COMPLEX_CST
8258 && real_dconstp (TREE_REALPART (expr), value)
8259 && real_zerop (TREE_IMAGPART (expr))));
8262 /* A subroutine of fold_builtin to fold the various logarithmic
8263 functions. Return NULL_TREE if no simplification can me made.
8264 FUNC is the corresponding MPFR logarithm function. */
8267 fold_builtin_logarithm (tree fndecl, tree arg,
8268 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8270 if (validate_arg (arg, REAL_TYPE))
8272 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8274 const enum built_in_function fcode = builtin_mathfn_code (arg);
8276 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8277 instead we'll look for 'e' truncated to MODE. So only do
8278 this if flag_unsafe_math_optimizations is set. */
8279 if (flag_unsafe_math_optimizations && func == mpfr_log)
8281 const REAL_VALUE_TYPE e_truncated =
8282 real_value_truncate (TYPE_MODE (type), dconst_e ());
8283 if (real_dconstp (arg, &e_truncated))
8284 return build_real (type, dconst1);
8287 /* Calculate the result when the argument is a constant. */
8288 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8291 /* Special case, optimize logN(expN(x)) = x. */
8292 if (flag_unsafe_math_optimizations
8293 && ((func == mpfr_log
8294 && (fcode == BUILT_IN_EXP
8295 || fcode == BUILT_IN_EXPF
8296 || fcode == BUILT_IN_EXPL))
8297 || (func == mpfr_log2
8298 && (fcode == BUILT_IN_EXP2
8299 || fcode == BUILT_IN_EXP2F
8300 || fcode == BUILT_IN_EXP2L))
8301 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8302 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8304 /* Optimize logN(func()) for various exponential functions. We
8305 want to determine the value "x" and the power "exponent" in
8306 order to transform logN(x**exponent) into exponent*logN(x). */
8307 if (flag_unsafe_math_optimizations)
8309 tree exponent = 0, x = 0;
8313 CASE_FLT_FN (BUILT_IN_EXP):
8314 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8315 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8317 exponent = CALL_EXPR_ARG (arg, 0);
8319 CASE_FLT_FN (BUILT_IN_EXP2):
8320 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8321 x = build_real (type, dconst2);
8322 exponent = CALL_EXPR_ARG (arg, 0);
8324 CASE_FLT_FN (BUILT_IN_EXP10):
8325 CASE_FLT_FN (BUILT_IN_POW10):
8326 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8328 REAL_VALUE_TYPE dconst10;
8329 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8330 x = build_real (type, dconst10);
8332 exponent = CALL_EXPR_ARG (arg, 0);
8334 CASE_FLT_FN (BUILT_IN_SQRT):
8335 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8336 x = CALL_EXPR_ARG (arg, 0);
8337 exponent = build_real (type, dconsthalf);
8339 CASE_FLT_FN (BUILT_IN_CBRT):
8340 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8341 x = CALL_EXPR_ARG (arg, 0);
8342 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8345 CASE_FLT_FN (BUILT_IN_POW):
8346 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8347 x = CALL_EXPR_ARG (arg, 0);
8348 exponent = CALL_EXPR_ARG (arg, 1);
8354 /* Now perform the optimization. */
8357 tree logfn = build_call_expr (fndecl, 1, x);
8358 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8366 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8367 NULL_TREE if no simplification can be made. */
8370 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8372 tree res, narg0, narg1;
8374 if (!validate_arg (arg0, REAL_TYPE)
8375 || !validate_arg (arg1, REAL_TYPE))
8378 /* Calculate the result when the argument is a constant. */
8379 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8382 /* If either argument to hypot has a negate or abs, strip that off.
8383 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8384 narg0 = fold_strip_sign_ops (arg0);
8385 narg1 = fold_strip_sign_ops (arg1);
8388 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8389 narg1 ? narg1 : arg1);
8392 /* If either argument is zero, hypot is fabs of the other. */
8393 if (real_zerop (arg0))
8394 return fold_build1 (ABS_EXPR, type, arg1);
8395 else if (real_zerop (arg1))
8396 return fold_build1 (ABS_EXPR, type, arg0);
8398 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8399 if (flag_unsafe_math_optimizations
8400 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8402 const REAL_VALUE_TYPE sqrt2_trunc
8403 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8404 return fold_build2 (MULT_EXPR, type,
8405 fold_build1 (ABS_EXPR, type, arg0),
8406 build_real (type, sqrt2_trunc));
8413 /* Fold a builtin function call to pow, powf, or powl. Return
8414 NULL_TREE if no simplification can be made. */
8416 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8420 if (!validate_arg (arg0, REAL_TYPE)
8421 || !validate_arg (arg1, REAL_TYPE))
8424 /* Calculate the result when the argument is a constant. */
8425 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8428 /* Optimize pow(1.0,y) = 1.0. */
8429 if (real_onep (arg0))
8430 return omit_one_operand (type, build_real (type, dconst1), arg1);
8432 if (TREE_CODE (arg1) == REAL_CST
8433 && !TREE_OVERFLOW (arg1))
8435 REAL_VALUE_TYPE cint;
8439 c = TREE_REAL_CST (arg1);
8441 /* Optimize pow(x,0.0) = 1.0. */
8442 if (REAL_VALUES_EQUAL (c, dconst0))
8443 return omit_one_operand (type, build_real (type, dconst1),
8446 /* Optimize pow(x,1.0) = x. */
8447 if (REAL_VALUES_EQUAL (c, dconst1))
8450 /* Optimize pow(x,-1.0) = 1.0/x. */
8451 if (REAL_VALUES_EQUAL (c, dconstm1))
8452 return fold_build2 (RDIV_EXPR, type,
8453 build_real (type, dconst1), arg0);
8455 /* Optimize pow(x,0.5) = sqrt(x). */
8456 if (flag_unsafe_math_optimizations
8457 && REAL_VALUES_EQUAL (c, dconsthalf))
8459 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8461 if (sqrtfn != NULL_TREE)
8462 return build_call_expr (sqrtfn, 1, arg0);
8465 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8466 if (flag_unsafe_math_optimizations)
8468 const REAL_VALUE_TYPE dconstroot
8469 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8471 if (REAL_VALUES_EQUAL (c, dconstroot))
8473 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8474 if (cbrtfn != NULL_TREE)
8475 return build_call_expr (cbrtfn, 1, arg0);
8479 /* Check for an integer exponent. */
8480 n = real_to_integer (&c);
8481 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8482 if (real_identical (&c, &cint))
8484 /* Attempt to evaluate pow at compile-time, unless this should
8485 raise an exception. */
8486 if (TREE_CODE (arg0) == REAL_CST
8487 && !TREE_OVERFLOW (arg0)
8489 || (!flag_trapping_math && !flag_errno_math)
8490 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8495 x = TREE_REAL_CST (arg0);
8496 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8497 if (flag_unsafe_math_optimizations || !inexact)
8498 return build_real (type, x);
8501 /* Strip sign ops from even integer powers. */
8502 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8504 tree narg0 = fold_strip_sign_ops (arg0);
8506 return build_call_expr (fndecl, 2, narg0, arg1);
8511 if (flag_unsafe_math_optimizations)
8513 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8515 /* Optimize pow(expN(x),y) = expN(x*y). */
8516 if (BUILTIN_EXPONENT_P (fcode))
8518 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8519 tree arg = CALL_EXPR_ARG (arg0, 0);
8520 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8521 return build_call_expr (expfn, 1, arg);
8524 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8525 if (BUILTIN_SQRT_P (fcode))
8527 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8528 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8529 build_real (type, dconsthalf));
8530 return build_call_expr (fndecl, 2, narg0, narg1);
8533 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8534 if (BUILTIN_CBRT_P (fcode))
8536 tree arg = CALL_EXPR_ARG (arg0, 0);
8537 if (tree_expr_nonnegative_p (arg))
8539 const REAL_VALUE_TYPE dconstroot
8540 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8541 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8542 build_real (type, dconstroot));
8543 return build_call_expr (fndecl, 2, arg, narg1);
8547 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8548 if (fcode == BUILT_IN_POW
8549 || fcode == BUILT_IN_POWF
8550 || fcode == BUILT_IN_POWL)
8552 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8553 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8554 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8555 return build_call_expr (fndecl, 2, arg00, narg1);
8562 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8563 Return NULL_TREE if no simplification can be made. */
8565 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8566 tree arg0, tree arg1, tree type)
8568 if (!validate_arg (arg0, REAL_TYPE)
8569 || !validate_arg (arg1, INTEGER_TYPE))
8572 /* Optimize pow(1.0,y) = 1.0. */
8573 if (real_onep (arg0))
8574 return omit_one_operand (type, build_real (type, dconst1), arg1);
8576 if (host_integerp (arg1, 0))
8578 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8580 /* Evaluate powi at compile-time. */
8581 if (TREE_CODE (arg0) == REAL_CST
8582 && !TREE_OVERFLOW (arg0))
8585 x = TREE_REAL_CST (arg0);
8586 real_powi (&x, TYPE_MODE (type), &x, c);
8587 return build_real (type, x);
8590 /* Optimize pow(x,0) = 1.0. */
8592 return omit_one_operand (type, build_real (type, dconst1),
8595 /* Optimize pow(x,1) = x. */
8599 /* Optimize pow(x,-1) = 1.0/x. */
8601 return fold_build2 (RDIV_EXPR, type,
8602 build_real (type, dconst1), arg0);
8608 /* A subroutine of fold_builtin to fold the various exponent
8609 functions. Return NULL_TREE if no simplification can be made.
8610 FUNC is the corresponding MPFR exponent function. */
8613 fold_builtin_exponent (tree fndecl, tree arg,
8614 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8616 if (validate_arg (arg, REAL_TYPE))
8618 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8621 /* Calculate the result when the argument is a constant. */
8622 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8625 /* Optimize expN(logN(x)) = x. */
8626 if (flag_unsafe_math_optimizations)
8628 const enum built_in_function fcode = builtin_mathfn_code (arg);
8630 if ((func == mpfr_exp
8631 && (fcode == BUILT_IN_LOG
8632 || fcode == BUILT_IN_LOGF
8633 || fcode == BUILT_IN_LOGL))
8634 || (func == mpfr_exp2
8635 && (fcode == BUILT_IN_LOG2
8636 || fcode == BUILT_IN_LOG2F
8637 || fcode == BUILT_IN_LOG2L))
8638 || (func == mpfr_exp10
8639 && (fcode == BUILT_IN_LOG10
8640 || fcode == BUILT_IN_LOG10F
8641 || fcode == BUILT_IN_LOG10L)))
8642 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8649 /* Return true if VAR is a VAR_DECL or a component thereof. */
8652 var_decl_component_p (tree var)
8655 while (handled_component_p (inner))
8656 inner = TREE_OPERAND (inner, 0);
8657 return SSA_VAR_P (inner);
8660 /* Fold function call to builtin memset. Return
8661 NULL_TREE if no simplification can be made. */
8664 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8667 unsigned HOST_WIDE_INT length, cval;
8669 if (! validate_arg (dest, POINTER_TYPE)
8670 || ! validate_arg (c, INTEGER_TYPE)
8671 || ! validate_arg (len, INTEGER_TYPE))
8674 if (! host_integerp (len, 1))
8677 /* If the LEN parameter is zero, return DEST. */
8678 if (integer_zerop (len))
8679 return omit_one_operand (type, dest, c);
8681 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8686 if (TREE_CODE (var) != ADDR_EXPR)
8689 var = TREE_OPERAND (var, 0);
8690 if (TREE_THIS_VOLATILE (var))
8693 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8694 && !POINTER_TYPE_P (TREE_TYPE (var)))
8697 if (! var_decl_component_p (var))
8700 length = tree_low_cst (len, 1);
8701 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8702 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8706 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8709 if (integer_zerop (c))
8713 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8716 cval = tree_low_cst (c, 1);
8720 cval |= (cval << 31) << 1;
8723 ret = build_int_cst_type (TREE_TYPE (var), cval);
8724 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8728 return omit_one_operand (type, dest, ret);
8731 /* Fold function call to builtin memset. Return
8732 NULL_TREE if no simplification can be made. */
8735 fold_builtin_bzero (tree dest, tree size, bool ignore)
8737 if (! validate_arg (dest, POINTER_TYPE)
8738 || ! validate_arg (size, INTEGER_TYPE))
8744 /* New argument list transforming bzero(ptr x, int y) to
8745 memset(ptr x, int 0, size_t y). This is done this way
8746 so that if it isn't expanded inline, we fallback to
8747 calling bzero instead of memset. */
8749 return fold_builtin_memset (dest, integer_zero_node,
8750 fold_convert (sizetype, size),
8751 void_type_node, ignore);
8754 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8755 NULL_TREE if no simplification can be made.
8756 If ENDP is 0, return DEST (like memcpy).
8757 If ENDP is 1, return DEST+LEN (like mempcpy).
8758 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8759 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8763 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8765 tree destvar, srcvar, expr;
8767 if (! validate_arg (dest, POINTER_TYPE)
8768 || ! validate_arg (src, POINTER_TYPE)
8769 || ! validate_arg (len, INTEGER_TYPE))
8772 /* If the LEN parameter is zero, return DEST. */
8773 if (integer_zerop (len))
8774 return omit_one_operand (type, dest, src);
8776 /* If SRC and DEST are the same (and not volatile), return
8777 DEST{,+LEN,+LEN-1}. */
8778 if (operand_equal_p (src, dest, 0))
8782 tree srctype, desttype;
8785 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8786 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8788 /* Both DEST and SRC must be pointer types.
8789 ??? This is what old code did. Is the testing for pointer types
8792 If either SRC is readonly or length is 1, we can use memcpy. */
8793 if (dest_align && src_align
8794 && (readonly_data_expr (src)
8795 || (host_integerp (len, 1)
8796 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8797 tree_low_cst (len, 1)))))
8799 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8802 return build_call_expr (fn, 3, dest, src, len);
8807 if (!host_integerp (len, 0))
8810 This logic lose for arguments like (type *)malloc (sizeof (type)),
8811 since we strip the casts of up to VOID return value from malloc.
8812 Perhaps we ought to inherit type from non-VOID argument here? */
8815 srctype = TREE_TYPE (TREE_TYPE (src));
8816 desttype = TREE_TYPE (TREE_TYPE (dest));
8817 if (!srctype || !desttype
8818 || !TYPE_SIZE_UNIT (srctype)
8819 || !TYPE_SIZE_UNIT (desttype)
8820 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8821 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8822 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8823 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8826 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8827 < (int) TYPE_ALIGN (desttype)
8828 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8829 < (int) TYPE_ALIGN (srctype)))
8833 dest = builtin_save_expr (dest);
8835 srcvar = build_fold_indirect_ref (src);
8836 if (TREE_THIS_VOLATILE (srcvar))
8838 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8840 /* With memcpy, it is possible to bypass aliasing rules, so without
8841 this check i.e. execute/20060930-2.c would be misoptimized, because
8842 it use conflicting alias set to hold argument for the memcpy call.
8843 This check is probably unnecessary with -fno-strict-aliasing.
8844 Similarly for destvar. See also PR29286. */
8845 if (!var_decl_component_p (srcvar)
8846 /* Accept: memcpy (*char_var, "test", 1); that simplify
8848 || is_gimple_min_invariant (srcvar)
8849 || readonly_data_expr (src))
8852 destvar = build_fold_indirect_ref (dest);
8853 if (TREE_THIS_VOLATILE (destvar))
8855 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8857 if (!var_decl_component_p (destvar))
8860 if (srctype == desttype
8861 || (gimple_in_ssa_p (cfun)
8862 && useless_type_conversion_p (desttype, srctype)))
8864 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8865 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8866 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8867 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8868 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8870 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8871 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8877 if (endp == 0 || endp == 3)
8878 return omit_one_operand (type, dest, expr);
8884 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8887 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8888 dest = fold_convert (type, dest);
8890 dest = omit_one_operand (type, dest, expr);
8894 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8895 If LEN is not NULL, it represents the length of the string to be
8896 copied. Return NULL_TREE if no simplification can be made. */
8899 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8903 if (!validate_arg (dest, POINTER_TYPE)
8904 || !validate_arg (src, POINTER_TYPE))
8907 /* If SRC and DEST are the same (and not volatile), return DEST. */
8908 if (operand_equal_p (src, dest, 0))
8909 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8911 if (optimize_function_for_size_p (cfun))
8914 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8920 len = c_strlen (src, 1);
8921 if (! len || TREE_SIDE_EFFECTS (len))
8925 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8926 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8927 build_call_expr (fn, 3, dest, src, len));
8930 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8931 If SLEN is not NULL, it represents the length of the source string.
8932 Return NULL_TREE if no simplification can be made. */
8935 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8939 if (!validate_arg (dest, POINTER_TYPE)
8940 || !validate_arg (src, POINTER_TYPE)
8941 || !validate_arg (len, INTEGER_TYPE))
8944 /* If the LEN parameter is zero, return DEST. */
8945 if (integer_zerop (len))
8946 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8948 /* We can't compare slen with len as constants below if len is not a
8950 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8954 slen = c_strlen (src, 1);
8956 /* Now, we must be passed a constant src ptr parameter. */
8957 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8960 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8962 /* We do not support simplification of this case, though we do
8963 support it when expanding trees into RTL. */
8964 /* FIXME: generate a call to __builtin_memset. */
8965 if (tree_int_cst_lt (slen, len))
8968 /* OK transform into builtin memcpy. */
8969 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8972 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8973 build_call_expr (fn, 3, dest, src, len));
8976 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8977 arguments to the call, and TYPE is its return type.
8978 Return NULL_TREE if no simplification can be made. */
8981 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8983 if (!validate_arg (arg1, POINTER_TYPE)
8984 || !validate_arg (arg2, INTEGER_TYPE)
8985 || !validate_arg (len, INTEGER_TYPE))
8991 if (TREE_CODE (arg2) != INTEGER_CST
8992 || !host_integerp (len, 1))
8995 p1 = c_getstr (arg1);
8996 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9002 if (target_char_cast (arg2, &c))
9005 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9008 return build_int_cst (TREE_TYPE (arg1), 0);
9010 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9012 return fold_convert (type, tem);
9018 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9019 Return NULL_TREE if no simplification can be made. */
9022 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
9024 const char *p1, *p2;
9026 if (!validate_arg (arg1, POINTER_TYPE)
9027 || !validate_arg (arg2, POINTER_TYPE)
9028 || !validate_arg (len, INTEGER_TYPE))
9031 /* If the LEN parameter is zero, return zero. */
9032 if (integer_zerop (len))
9033 return omit_two_operands (integer_type_node, integer_zero_node,
9036 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9037 if (operand_equal_p (arg1, arg2, 0))
9038 return omit_one_operand (integer_type_node, integer_zero_node, len);
9040 p1 = c_getstr (arg1);
9041 p2 = c_getstr (arg2);
9043 /* If all arguments are constant, and the value of len is not greater
9044 than the lengths of arg1 and arg2, evaluate at compile-time. */
9045 if (host_integerp (len, 1) && p1 && p2
9046 && compare_tree_int (len, strlen (p1) + 1) <= 0
9047 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9049 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9052 return integer_one_node;
9054 return integer_minus_one_node;
9056 return integer_zero_node;
9059 /* If len parameter is one, return an expression corresponding to
9060 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9061 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9063 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9064 tree cst_uchar_ptr_node
9065 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9067 tree ind1 = fold_convert (integer_type_node,
9068 build1 (INDIRECT_REF, cst_uchar_node,
9069 fold_convert (cst_uchar_ptr_node,
9071 tree ind2 = fold_convert (integer_type_node,
9072 build1 (INDIRECT_REF, cst_uchar_node,
9073 fold_convert (cst_uchar_ptr_node,
9075 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9081 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9082 Return NULL_TREE if no simplification can be made. */
9085 fold_builtin_strcmp (tree arg1, tree arg2)
9087 const char *p1, *p2;
9089 if (!validate_arg (arg1, POINTER_TYPE)
9090 || !validate_arg (arg2, POINTER_TYPE))
9093 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9094 if (operand_equal_p (arg1, arg2, 0))
9095 return integer_zero_node;
9097 p1 = c_getstr (arg1);
9098 p2 = c_getstr (arg2);
9102 const int i = strcmp (p1, p2);
9104 return integer_minus_one_node;
9106 return integer_one_node;
9108 return integer_zero_node;
9111 /* If the second arg is "", return *(const unsigned char*)arg1. */
9112 if (p2 && *p2 == '\0')
9114 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9115 tree cst_uchar_ptr_node
9116 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9118 return fold_convert (integer_type_node,
9119 build1 (INDIRECT_REF, cst_uchar_node,
9120 fold_convert (cst_uchar_ptr_node,
9124 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9125 if (p1 && *p1 == '\0')
9127 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9128 tree cst_uchar_ptr_node
9129 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9131 tree temp = fold_convert (integer_type_node,
9132 build1 (INDIRECT_REF, cst_uchar_node,
9133 fold_convert (cst_uchar_ptr_node,
9135 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9141 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9142 Return NULL_TREE if no simplification can be made. */
9145 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9147 const char *p1, *p2;
9149 if (!validate_arg (arg1, POINTER_TYPE)
9150 || !validate_arg (arg2, POINTER_TYPE)
9151 || !validate_arg (len, INTEGER_TYPE))
9154 /* If the LEN parameter is zero, return zero. */
9155 if (integer_zerop (len))
9156 return omit_two_operands (integer_type_node, integer_zero_node,
9159 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9160 if (operand_equal_p (arg1, arg2, 0))
9161 return omit_one_operand (integer_type_node, integer_zero_node, len);
9163 p1 = c_getstr (arg1);
9164 p2 = c_getstr (arg2);
9166 if (host_integerp (len, 1) && p1 && p2)
9168 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9170 return integer_one_node;
9172 return integer_minus_one_node;
9174 return integer_zero_node;
9177 /* If the second arg is "", and the length is greater than zero,
9178 return *(const unsigned char*)arg1. */
9179 if (p2 && *p2 == '\0'
9180 && TREE_CODE (len) == INTEGER_CST
9181 && tree_int_cst_sgn (len) == 1)
9183 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9184 tree cst_uchar_ptr_node
9185 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9187 return fold_convert (integer_type_node,
9188 build1 (INDIRECT_REF, cst_uchar_node,
9189 fold_convert (cst_uchar_ptr_node,
9193 /* If the first arg is "", and the length is greater than zero,
9194 return -*(const unsigned char*)arg2. */
9195 if (p1 && *p1 == '\0'
9196 && TREE_CODE (len) == INTEGER_CST
9197 && tree_int_cst_sgn (len) == 1)
9199 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9200 tree cst_uchar_ptr_node
9201 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9203 tree temp = fold_convert (integer_type_node,
9204 build1 (INDIRECT_REF, cst_uchar_node,
9205 fold_convert (cst_uchar_ptr_node,
9207 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9210 /* If len parameter is one, return an expression corresponding to
9211 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9212 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9214 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9215 tree cst_uchar_ptr_node
9216 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9218 tree ind1 = fold_convert (integer_type_node,
9219 build1 (INDIRECT_REF, cst_uchar_node,
9220 fold_convert (cst_uchar_ptr_node,
9222 tree ind2 = fold_convert (integer_type_node,
9223 build1 (INDIRECT_REF, cst_uchar_node,
9224 fold_convert (cst_uchar_ptr_node,
9226 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9232 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9233 ARG. Return NULL_TREE if no simplification can be made. */
9236 fold_builtin_signbit (tree arg, tree type)
9240 if (!validate_arg (arg, REAL_TYPE))
9243 /* If ARG is a compile-time constant, determine the result. */
9244 if (TREE_CODE (arg) == REAL_CST
9245 && !TREE_OVERFLOW (arg))
9249 c = TREE_REAL_CST (arg);
9250 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9251 return fold_convert (type, temp);
9254 /* If ARG is non-negative, the result is always zero. */
9255 if (tree_expr_nonnegative_p (arg))
9256 return omit_one_operand (type, integer_zero_node, arg);
9258 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9259 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9260 return fold_build2 (LT_EXPR, type, arg,
9261 build_real (TREE_TYPE (arg), dconst0));
9266 /* Fold function call to builtin copysign, copysignf or copysignl with
9267 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9271 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9275 if (!validate_arg (arg1, REAL_TYPE)
9276 || !validate_arg (arg2, REAL_TYPE))
9279 /* copysign(X,X) is X. */
9280 if (operand_equal_p (arg1, arg2, 0))
9281 return fold_convert (type, arg1);
9283 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9284 if (TREE_CODE (arg1) == REAL_CST
9285 && TREE_CODE (arg2) == REAL_CST
9286 && !TREE_OVERFLOW (arg1)
9287 && !TREE_OVERFLOW (arg2))
9289 REAL_VALUE_TYPE c1, c2;
9291 c1 = TREE_REAL_CST (arg1);
9292 c2 = TREE_REAL_CST (arg2);
9293 /* c1.sign := c2.sign. */
9294 real_copysign (&c1, &c2);
9295 return build_real (type, c1);
9298 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9299 Remember to evaluate Y for side-effects. */
9300 if (tree_expr_nonnegative_p (arg2))
9301 return omit_one_operand (type,
9302 fold_build1 (ABS_EXPR, type, arg1),
9305 /* Strip sign changing operations for the first argument. */
9306 tem = fold_strip_sign_ops (arg1);
9308 return build_call_expr (fndecl, 2, tem, arg2);
9313 /* Fold a call to builtin isascii with argument ARG. */
9316 fold_builtin_isascii (tree arg)
9318 if (!validate_arg (arg, INTEGER_TYPE))
9322 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9323 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9324 build_int_cst (NULL_TREE,
9325 ~ (unsigned HOST_WIDE_INT) 0x7f));
9326 return fold_build2 (EQ_EXPR, integer_type_node,
9327 arg, integer_zero_node);
9331 /* Fold a call to builtin toascii with argument ARG. */
9334 fold_builtin_toascii (tree arg)
9336 if (!validate_arg (arg, INTEGER_TYPE))
9339 /* Transform toascii(c) -> (c & 0x7f). */
9340 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9341 build_int_cst (NULL_TREE, 0x7f));
9344 /* Fold a call to builtin isdigit with argument ARG. */
9347 fold_builtin_isdigit (tree arg)
9349 if (!validate_arg (arg, INTEGER_TYPE))
9353 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9354 /* According to the C standard, isdigit is unaffected by locale.
9355 However, it definitely is affected by the target character set. */
9356 unsigned HOST_WIDE_INT target_digit0
9357 = lang_hooks.to_target_charset ('0');
9359 if (target_digit0 == 0)
9362 arg = fold_convert (unsigned_type_node, arg);
9363 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9364 build_int_cst (unsigned_type_node, target_digit0));
9365 return fold_build2 (LE_EXPR, integer_type_node, arg,
9366 build_int_cst (unsigned_type_node, 9));
9370 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9373 fold_builtin_fabs (tree arg, tree type)
9375 if (!validate_arg (arg, REAL_TYPE))
9378 arg = fold_convert (type, arg);
9379 if (TREE_CODE (arg) == REAL_CST)
9380 return fold_abs_const (arg, type);
9381 return fold_build1 (ABS_EXPR, type, arg);
9384 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9387 fold_builtin_abs (tree arg, tree type)
9389 if (!validate_arg (arg, INTEGER_TYPE))
9392 arg = fold_convert (type, arg);
9393 if (TREE_CODE (arg) == INTEGER_CST)
9394 return fold_abs_const (arg, type);
9395 return fold_build1 (ABS_EXPR, type, arg);
9398 /* Fold a call to builtin fmin or fmax. */
9401 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9403 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9405 /* Calculate the result when the argument is a constant. */
9406 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9411 /* If either argument is NaN, return the other one. Avoid the
9412 transformation if we get (and honor) a signalling NaN. Using
9413 omit_one_operand() ensures we create a non-lvalue. */
9414 if (TREE_CODE (arg0) == REAL_CST
9415 && real_isnan (&TREE_REAL_CST (arg0))
9416 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9417 || ! TREE_REAL_CST (arg0).signalling))
9418 return omit_one_operand (type, arg1, arg0);
9419 if (TREE_CODE (arg1) == REAL_CST
9420 && real_isnan (&TREE_REAL_CST (arg1))
9421 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9422 || ! TREE_REAL_CST (arg1).signalling))
9423 return omit_one_operand (type, arg0, arg1);
9425 /* Transform fmin/fmax(x,x) -> x. */
9426 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9427 return omit_one_operand (type, arg0, arg1);
9429 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9430 functions to return the numeric arg if the other one is NaN.
9431 These tree codes don't honor that, so only transform if
9432 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9433 handled, so we don't have to worry about it either. */
9434 if (flag_finite_math_only)
9435 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9436 fold_convert (type, arg0),
9437 fold_convert (type, arg1));
9442 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9445 fold_builtin_carg (tree arg, tree type)
9447 if (validate_arg (arg, COMPLEX_TYPE))
9449 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9453 tree new_arg = builtin_save_expr (arg);
9454 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9455 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9456 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9463 /* Fold a call to builtin logb/ilogb. */
9466 fold_builtin_logb (tree arg, tree rettype)
9468 if (! validate_arg (arg, REAL_TYPE))
9473 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9475 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9481 /* If arg is Inf or NaN and we're logb, return it. */
9482 if (TREE_CODE (rettype) == REAL_TYPE)
9483 return fold_convert (rettype, arg);
9484 /* Fall through... */
9486 /* Zero may set errno and/or raise an exception for logb, also
9487 for ilogb we don't know FP_ILOGB0. */
9490 /* For normal numbers, proceed iff radix == 2. In GCC,
9491 normalized significands are in the range [0.5, 1.0). We
9492 want the exponent as if they were [1.0, 2.0) so get the
9493 exponent and subtract 1. */
9494 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9495 return fold_convert (rettype, build_int_cst (NULL_TREE,
9496 REAL_EXP (value)-1));
9504 /* Fold a call to builtin significand, if radix == 2. */
9507 fold_builtin_significand (tree arg, tree rettype)
9509 if (! validate_arg (arg, REAL_TYPE))
9514 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9516 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9523 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9524 return fold_convert (rettype, arg);
9526 /* For normal numbers, proceed iff radix == 2. */
9527 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9529 REAL_VALUE_TYPE result = *value;
9530 /* In GCC, normalized significands are in the range [0.5,
9531 1.0). We want them to be [1.0, 2.0) so set the
9533 SET_REAL_EXP (&result, 1);
9534 return build_real (rettype, result);
9543 /* Fold a call to builtin frexp, we can assume the base is 2. */
9546 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9548 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9553 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9556 arg1 = build_fold_indirect_ref (arg1);
9558 /* Proceed if a valid pointer type was passed in. */
9559 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9561 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9567 /* For +-0, return (*exp = 0, +-0). */
9568 exp = integer_zero_node;
9573 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9574 return omit_one_operand (rettype, arg0, arg1);
9577 /* Since the frexp function always expects base 2, and in
9578 GCC normalized significands are already in the range
9579 [0.5, 1.0), we have exactly what frexp wants. */
9580 REAL_VALUE_TYPE frac_rvt = *value;
9581 SET_REAL_EXP (&frac_rvt, 0);
9582 frac = build_real (rettype, frac_rvt);
9583 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9590 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9591 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9592 TREE_SIDE_EFFECTS (arg1) = 1;
9593 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9599 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9600 then we can assume the base is two. If it's false, then we have to
9601 check the mode of the TYPE parameter in certain cases. */
9604 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9606 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9611 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9612 if (real_zerop (arg0) || integer_zerop (arg1)
9613 || (TREE_CODE (arg0) == REAL_CST
9614 && !real_isfinite (&TREE_REAL_CST (arg0))))
9615 return omit_one_operand (type, arg0, arg1);
9617 /* If both arguments are constant, then try to evaluate it. */
9618 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9619 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9620 && host_integerp (arg1, 0))
9622 /* Bound the maximum adjustment to twice the range of the
9623 mode's valid exponents. Use abs to ensure the range is
9624 positive as a sanity check. */
9625 const long max_exp_adj = 2 *
9626 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9627 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9629 /* Get the user-requested adjustment. */
9630 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9632 /* The requested adjustment must be inside this range. This
9633 is a preliminary cap to avoid things like overflow, we
9634 may still fail to compute the result for other reasons. */
9635 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9637 REAL_VALUE_TYPE initial_result;
9639 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9641 /* Ensure we didn't overflow. */
9642 if (! real_isinf (&initial_result))
9644 const REAL_VALUE_TYPE trunc_result
9645 = real_value_truncate (TYPE_MODE (type), initial_result);
9647 /* Only proceed if the target mode can hold the
9649 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9650 return build_real (type, trunc_result);
9659 /* Fold a call to builtin modf. */
9662 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9664 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9669 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9672 arg1 = build_fold_indirect_ref (arg1);
9674 /* Proceed if a valid pointer type was passed in. */
9675 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9677 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9678 REAL_VALUE_TYPE trunc, frac;
9684 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9685 trunc = frac = *value;
9688 /* For +-Inf, return (*arg1 = arg0, +-0). */
9690 frac.sign = value->sign;
9694 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9695 real_trunc (&trunc, VOIDmode, value);
9696 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9697 /* If the original number was negative and already
9698 integral, then the fractional part is -0.0. */
9699 if (value->sign && frac.cl == rvc_zero)
9700 frac.sign = value->sign;
9704 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9705 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9706 build_real (rettype, trunc));
9707 TREE_SIDE_EFFECTS (arg1) = 1;
9708 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9709 build_real (rettype, frac));
9715 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9716 ARG is the argument for the call. */
9719 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9721 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9724 if (!validate_arg (arg, REAL_TYPE))
9727 switch (builtin_index)
9729 case BUILT_IN_ISINF:
9730 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9731 return omit_one_operand (type, integer_zero_node, arg);
9733 if (TREE_CODE (arg) == REAL_CST)
9735 r = TREE_REAL_CST (arg);
9736 if (real_isinf (&r))
9737 return real_compare (GT_EXPR, &r, &dconst0)
9738 ? integer_one_node : integer_minus_one_node;
9740 return integer_zero_node;
9745 case BUILT_IN_ISINF_SIGN:
9747 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9748 /* In a boolean context, GCC will fold the inner COND_EXPR to
9749 1. So e.g. "if (isinf_sign(x))" would be folded to just
9750 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9751 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9752 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9753 tree tmp = NULL_TREE;
9755 arg = builtin_save_expr (arg);
9757 if (signbit_fn && isinf_fn)
9759 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9760 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9762 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9763 signbit_call, integer_zero_node);
9764 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9765 isinf_call, integer_zero_node);
9767 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9768 integer_minus_one_node, integer_one_node);
9769 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9776 case BUILT_IN_ISFINITE:
9777 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9778 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9779 return omit_one_operand (type, integer_one_node, arg);
9781 if (TREE_CODE (arg) == REAL_CST)
9783 r = TREE_REAL_CST (arg);
9784 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9789 case BUILT_IN_ISNAN:
9790 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9791 return omit_one_operand (type, integer_zero_node, arg);
9793 if (TREE_CODE (arg) == REAL_CST)
9795 r = TREE_REAL_CST (arg);
9796 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9799 arg = builtin_save_expr (arg);
9800 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9807 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9808 This builtin will generate code to return the appropriate floating
9809 point classification depending on the value of the floating point
9810 number passed in. The possible return values must be supplied as
9811 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9812 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9813 one floating point argument which is "type generic". */
9816 fold_builtin_fpclassify (tree exp)
9818 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9819 arg, type, res, tmp;
9820 enum machine_mode mode;
9824 /* Verify the required arguments in the original call. */
9825 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9826 INTEGER_TYPE, INTEGER_TYPE,
9827 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9830 fp_nan = CALL_EXPR_ARG (exp, 0);
9831 fp_infinite = CALL_EXPR_ARG (exp, 1);
9832 fp_normal = CALL_EXPR_ARG (exp, 2);
9833 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9834 fp_zero = CALL_EXPR_ARG (exp, 4);
9835 arg = CALL_EXPR_ARG (exp, 5);
9836 type = TREE_TYPE (arg);
9837 mode = TYPE_MODE (type);
9838 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
9842 (fabs(x) == Inf ? FP_INFINITE :
9843 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9844 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9846 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9847 build_real (type, dconst0));
9848 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
9850 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9851 real_from_string (&r, buf);
9852 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
9853 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
9855 if (HONOR_INFINITIES (mode))
9858 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9859 build_real (type, r));
9860 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
9863 if (HONOR_NANS (mode))
9865 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
9866 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
9872 /* Fold a call to an unordered comparison function such as
9873 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9874 being called and ARG0 and ARG1 are the arguments for the call.
9875 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9876 the opposite of the desired result. UNORDERED_CODE is used
9877 for modes that can hold NaNs and ORDERED_CODE is used for
9881 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9882 enum tree_code unordered_code,
9883 enum tree_code ordered_code)
9885 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9886 enum tree_code code;
9888 enum tree_code code0, code1;
9889 tree cmp_type = NULL_TREE;
9891 type0 = TREE_TYPE (arg0);
9892 type1 = TREE_TYPE (arg1);
9894 code0 = TREE_CODE (type0);
9895 code1 = TREE_CODE (type1);
9897 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9898 /* Choose the wider of two real types. */
9899 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9901 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9903 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9906 arg0 = fold_convert (cmp_type, arg0);
9907 arg1 = fold_convert (cmp_type, arg1);
9909 if (unordered_code == UNORDERED_EXPR)
9911 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9912 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9913 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9916 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9918 return fold_build1 (TRUTH_NOT_EXPR, type,
9919 fold_build2 (code, type, arg0, arg1));
9922 /* Fold a call to built-in function FNDECL with 0 arguments.
9923 IGNORE is true if the result of the function call is ignored. This
9924 function returns NULL_TREE if no simplification was possible. */
9927 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9929 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9930 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9933 CASE_FLT_FN (BUILT_IN_INF):
9934 case BUILT_IN_INFD32:
9935 case BUILT_IN_INFD64:
9936 case BUILT_IN_INFD128:
9937 return fold_builtin_inf (type, true);
9939 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9940 return fold_builtin_inf (type, false);
9942 case BUILT_IN_CLASSIFY_TYPE:
9943 return fold_builtin_classify_type (NULL_TREE);
9951 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9952 IGNORE is true if the result of the function call is ignored. This
9953 function returns NULL_TREE if no simplification was possible. */
9956 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9958 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9959 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9963 case BUILT_IN_CONSTANT_P:
9965 tree val = fold_builtin_constant_p (arg0);
9967 /* Gimplification will pull the CALL_EXPR for the builtin out of
9968 an if condition. When not optimizing, we'll not CSE it back.
9969 To avoid link error types of regressions, return false now. */
9970 if (!val && !optimize)
9971 val = integer_zero_node;
9976 case BUILT_IN_CLASSIFY_TYPE:
9977 return fold_builtin_classify_type (arg0);
9979 case BUILT_IN_STRLEN:
9980 return fold_builtin_strlen (arg0);
9982 CASE_FLT_FN (BUILT_IN_FABS):
9983 return fold_builtin_fabs (arg0, type);
9987 case BUILT_IN_LLABS:
9988 case BUILT_IN_IMAXABS:
9989 return fold_builtin_abs (arg0, type);
9991 CASE_FLT_FN (BUILT_IN_CONJ):
9992 if (validate_arg (arg0, COMPLEX_TYPE))
9993 return fold_build1 (CONJ_EXPR, type, arg0);
9996 CASE_FLT_FN (BUILT_IN_CREAL):
9997 if (validate_arg (arg0, COMPLEX_TYPE))
9998 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
10001 CASE_FLT_FN (BUILT_IN_CIMAG):
10002 if (validate_arg (arg0, COMPLEX_TYPE))
10003 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
10006 CASE_FLT_FN (BUILT_IN_CCOS):
10007 CASE_FLT_FN (BUILT_IN_CCOSH):
10008 /* These functions are "even", i.e. f(x) == f(-x). */
10009 if (validate_arg (arg0, COMPLEX_TYPE))
10011 tree narg = fold_strip_sign_ops (arg0);
10013 return build_call_expr (fndecl, 1, narg);
10017 CASE_FLT_FN (BUILT_IN_CABS):
10018 return fold_builtin_cabs (arg0, type, fndecl);
10020 CASE_FLT_FN (BUILT_IN_CARG):
10021 return fold_builtin_carg (arg0, type);
10023 CASE_FLT_FN (BUILT_IN_SQRT):
10024 return fold_builtin_sqrt (arg0, type);
10026 CASE_FLT_FN (BUILT_IN_CBRT):
10027 return fold_builtin_cbrt (arg0, type);
10029 CASE_FLT_FN (BUILT_IN_ASIN):
10030 if (validate_arg (arg0, REAL_TYPE))
10031 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10032 &dconstm1, &dconst1, true);
10035 CASE_FLT_FN (BUILT_IN_ACOS):
10036 if (validate_arg (arg0, REAL_TYPE))
10037 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10038 &dconstm1, &dconst1, true);
10041 CASE_FLT_FN (BUILT_IN_ATAN):
10042 if (validate_arg (arg0, REAL_TYPE))
10043 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10046 CASE_FLT_FN (BUILT_IN_ASINH):
10047 if (validate_arg (arg0, REAL_TYPE))
10048 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10051 CASE_FLT_FN (BUILT_IN_ACOSH):
10052 if (validate_arg (arg0, REAL_TYPE))
10053 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10054 &dconst1, NULL, true);
10057 CASE_FLT_FN (BUILT_IN_ATANH):
10058 if (validate_arg (arg0, REAL_TYPE))
10059 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10060 &dconstm1, &dconst1, false);
10063 CASE_FLT_FN (BUILT_IN_SIN):
10064 if (validate_arg (arg0, REAL_TYPE))
10065 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10068 CASE_FLT_FN (BUILT_IN_COS):
10069 return fold_builtin_cos (arg0, type, fndecl);
10072 CASE_FLT_FN (BUILT_IN_TAN):
10073 return fold_builtin_tan (arg0, type);
10075 CASE_FLT_FN (BUILT_IN_CEXP):
10076 return fold_builtin_cexp (arg0, type);
10078 CASE_FLT_FN (BUILT_IN_CEXPI):
10079 if (validate_arg (arg0, REAL_TYPE))
10080 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10083 CASE_FLT_FN (BUILT_IN_SINH):
10084 if (validate_arg (arg0, REAL_TYPE))
10085 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10088 CASE_FLT_FN (BUILT_IN_COSH):
10089 return fold_builtin_cosh (arg0, type, fndecl);
10091 CASE_FLT_FN (BUILT_IN_TANH):
10092 if (validate_arg (arg0, REAL_TYPE))
10093 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10096 CASE_FLT_FN (BUILT_IN_ERF):
10097 if (validate_arg (arg0, REAL_TYPE))
10098 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10101 CASE_FLT_FN (BUILT_IN_ERFC):
10102 if (validate_arg (arg0, REAL_TYPE))
10103 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10106 CASE_FLT_FN (BUILT_IN_TGAMMA):
10107 if (validate_arg (arg0, REAL_TYPE))
10108 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10111 CASE_FLT_FN (BUILT_IN_EXP):
10112 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10114 CASE_FLT_FN (BUILT_IN_EXP2):
10115 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10117 CASE_FLT_FN (BUILT_IN_EXP10):
10118 CASE_FLT_FN (BUILT_IN_POW10):
10119 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10121 CASE_FLT_FN (BUILT_IN_EXPM1):
10122 if (validate_arg (arg0, REAL_TYPE))
10123 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10126 CASE_FLT_FN (BUILT_IN_LOG):
10127 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10129 CASE_FLT_FN (BUILT_IN_LOG2):
10130 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10132 CASE_FLT_FN (BUILT_IN_LOG10):
10133 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10135 CASE_FLT_FN (BUILT_IN_LOG1P):
10136 if (validate_arg (arg0, REAL_TYPE))
10137 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10138 &dconstm1, NULL, false);
10141 CASE_FLT_FN (BUILT_IN_J0):
10142 if (validate_arg (arg0, REAL_TYPE))
10143 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10147 CASE_FLT_FN (BUILT_IN_J1):
10148 if (validate_arg (arg0, REAL_TYPE))
10149 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10153 CASE_FLT_FN (BUILT_IN_Y0):
10154 if (validate_arg (arg0, REAL_TYPE))
10155 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10156 &dconst0, NULL, false);
10159 CASE_FLT_FN (BUILT_IN_Y1):
10160 if (validate_arg (arg0, REAL_TYPE))
10161 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10162 &dconst0, NULL, false);
10165 CASE_FLT_FN (BUILT_IN_NAN):
10166 case BUILT_IN_NAND32:
10167 case BUILT_IN_NAND64:
10168 case BUILT_IN_NAND128:
10169 return fold_builtin_nan (arg0, type, true);
10171 CASE_FLT_FN (BUILT_IN_NANS):
10172 return fold_builtin_nan (arg0, type, false);
10174 CASE_FLT_FN (BUILT_IN_FLOOR):
10175 return fold_builtin_floor (fndecl, arg0);
10177 CASE_FLT_FN (BUILT_IN_CEIL):
10178 return fold_builtin_ceil (fndecl, arg0);
10180 CASE_FLT_FN (BUILT_IN_TRUNC):
10181 return fold_builtin_trunc (fndecl, arg0);
10183 CASE_FLT_FN (BUILT_IN_ROUND):
10184 return fold_builtin_round (fndecl, arg0);
10186 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10187 CASE_FLT_FN (BUILT_IN_RINT):
10188 return fold_trunc_transparent_mathfn (fndecl, arg0);
10190 CASE_FLT_FN (BUILT_IN_LCEIL):
10191 CASE_FLT_FN (BUILT_IN_LLCEIL):
10192 CASE_FLT_FN (BUILT_IN_LFLOOR):
10193 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10194 CASE_FLT_FN (BUILT_IN_LROUND):
10195 CASE_FLT_FN (BUILT_IN_LLROUND):
10196 return fold_builtin_int_roundingfn (fndecl, arg0);
10198 CASE_FLT_FN (BUILT_IN_LRINT):
10199 CASE_FLT_FN (BUILT_IN_LLRINT):
10200 return fold_fixed_mathfn (fndecl, arg0);
10202 case BUILT_IN_BSWAP32:
10203 case BUILT_IN_BSWAP64:
10204 return fold_builtin_bswap (fndecl, arg0);
10206 CASE_INT_FN (BUILT_IN_FFS):
10207 CASE_INT_FN (BUILT_IN_CLZ):
10208 CASE_INT_FN (BUILT_IN_CTZ):
10209 CASE_INT_FN (BUILT_IN_POPCOUNT):
10210 CASE_INT_FN (BUILT_IN_PARITY):
10211 return fold_builtin_bitop (fndecl, arg0);
10213 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10214 return fold_builtin_signbit (arg0, type);
10216 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10217 return fold_builtin_significand (arg0, type);
10219 CASE_FLT_FN (BUILT_IN_ILOGB):
10220 CASE_FLT_FN (BUILT_IN_LOGB):
10221 return fold_builtin_logb (arg0, type);
10223 case BUILT_IN_ISASCII:
10224 return fold_builtin_isascii (arg0);
10226 case BUILT_IN_TOASCII:
10227 return fold_builtin_toascii (arg0);
10229 case BUILT_IN_ISDIGIT:
10230 return fold_builtin_isdigit (arg0);
10232 CASE_FLT_FN (BUILT_IN_FINITE):
10233 case BUILT_IN_FINITED32:
10234 case BUILT_IN_FINITED64:
10235 case BUILT_IN_FINITED128:
10236 case BUILT_IN_ISFINITE:
10237 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10239 CASE_FLT_FN (BUILT_IN_ISINF):
10240 case BUILT_IN_ISINFD32:
10241 case BUILT_IN_ISINFD64:
10242 case BUILT_IN_ISINFD128:
10243 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10245 case BUILT_IN_ISINF_SIGN:
10246 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10248 CASE_FLT_FN (BUILT_IN_ISNAN):
10249 case BUILT_IN_ISNAND32:
10250 case BUILT_IN_ISNAND64:
10251 case BUILT_IN_ISNAND128:
10252 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10254 case BUILT_IN_PRINTF:
10255 case BUILT_IN_PRINTF_UNLOCKED:
10256 case BUILT_IN_VPRINTF:
10257 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10267 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10268 IGNORE is true if the result of the function call is ignored. This
10269 function returns NULL_TREE if no simplification was possible. */
10272 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10274 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10275 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10279 CASE_FLT_FN (BUILT_IN_JN):
10280 if (validate_arg (arg0, INTEGER_TYPE)
10281 && validate_arg (arg1, REAL_TYPE))
10282 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10285 CASE_FLT_FN (BUILT_IN_YN):
10286 if (validate_arg (arg0, INTEGER_TYPE)
10287 && validate_arg (arg1, REAL_TYPE))
10288 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10292 CASE_FLT_FN (BUILT_IN_DREM):
10293 CASE_FLT_FN (BUILT_IN_REMAINDER):
10294 if (validate_arg (arg0, REAL_TYPE)
10295 && validate_arg(arg1, REAL_TYPE))
10296 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10299 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10300 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10301 if (validate_arg (arg0, REAL_TYPE)
10302 && validate_arg(arg1, POINTER_TYPE))
10303 return do_mpfr_lgamma_r (arg0, arg1, type);
10306 CASE_FLT_FN (BUILT_IN_ATAN2):
10307 if (validate_arg (arg0, REAL_TYPE)
10308 && validate_arg(arg1, REAL_TYPE))
10309 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10312 CASE_FLT_FN (BUILT_IN_FDIM):
10313 if (validate_arg (arg0, REAL_TYPE)
10314 && validate_arg(arg1, REAL_TYPE))
10315 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10318 CASE_FLT_FN (BUILT_IN_HYPOT):
10319 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10321 CASE_FLT_FN (BUILT_IN_LDEXP):
10322 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10323 CASE_FLT_FN (BUILT_IN_SCALBN):
10324 CASE_FLT_FN (BUILT_IN_SCALBLN):
10325 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10327 CASE_FLT_FN (BUILT_IN_FREXP):
10328 return fold_builtin_frexp (arg0, arg1, type);
10330 CASE_FLT_FN (BUILT_IN_MODF):
10331 return fold_builtin_modf (arg0, arg1, type);
10333 case BUILT_IN_BZERO:
10334 return fold_builtin_bzero (arg0, arg1, ignore);
10336 case BUILT_IN_FPUTS:
10337 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10339 case BUILT_IN_FPUTS_UNLOCKED:
10340 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10342 case BUILT_IN_STRSTR:
10343 return fold_builtin_strstr (arg0, arg1, type);
10345 case BUILT_IN_STRCAT:
10346 return fold_builtin_strcat (arg0, arg1);
10348 case BUILT_IN_STRSPN:
10349 return fold_builtin_strspn (arg0, arg1);
10351 case BUILT_IN_STRCSPN:
10352 return fold_builtin_strcspn (arg0, arg1);
10354 case BUILT_IN_STRCHR:
10355 case BUILT_IN_INDEX:
10356 return fold_builtin_strchr (arg0, arg1, type);
10358 case BUILT_IN_STRRCHR:
10359 case BUILT_IN_RINDEX:
10360 return fold_builtin_strrchr (arg0, arg1, type);
10362 case BUILT_IN_STRCPY:
10363 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10365 case BUILT_IN_STRCMP:
10366 return fold_builtin_strcmp (arg0, arg1);
10368 case BUILT_IN_STRPBRK:
10369 return fold_builtin_strpbrk (arg0, arg1, type);
10371 case BUILT_IN_EXPECT:
10372 return fold_builtin_expect (arg0, arg1);
10374 CASE_FLT_FN (BUILT_IN_POW):
10375 return fold_builtin_pow (fndecl, arg0, arg1, type);
10377 CASE_FLT_FN (BUILT_IN_POWI):
10378 return fold_builtin_powi (fndecl, arg0, arg1, type);
10380 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10381 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10383 CASE_FLT_FN (BUILT_IN_FMIN):
10384 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10386 CASE_FLT_FN (BUILT_IN_FMAX):
10387 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10389 case BUILT_IN_ISGREATER:
10390 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10391 case BUILT_IN_ISGREATEREQUAL:
10392 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10393 case BUILT_IN_ISLESS:
10394 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10395 case BUILT_IN_ISLESSEQUAL:
10396 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10397 case BUILT_IN_ISLESSGREATER:
10398 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10399 case BUILT_IN_ISUNORDERED:
10400 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10403 /* We do the folding for va_start in the expander. */
10404 case BUILT_IN_VA_START:
10407 case BUILT_IN_SPRINTF:
10408 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10410 case BUILT_IN_OBJECT_SIZE:
10411 return fold_builtin_object_size (arg0, arg1);
10413 case BUILT_IN_PRINTF:
10414 case BUILT_IN_PRINTF_UNLOCKED:
10415 case BUILT_IN_VPRINTF:
10416 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10418 case BUILT_IN_PRINTF_CHK:
10419 case BUILT_IN_VPRINTF_CHK:
10420 if (!validate_arg (arg0, INTEGER_TYPE)
10421 || TREE_SIDE_EFFECTS (arg0))
10424 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10427 case BUILT_IN_FPRINTF:
10428 case BUILT_IN_FPRINTF_UNLOCKED:
10429 case BUILT_IN_VFPRINTF:
10430 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10439 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10440 and ARG2. IGNORE is true if the result of the function call is ignored.
10441 This function returns NULL_TREE if no simplification was possible. */
10444 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10446 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10447 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10451 CASE_FLT_FN (BUILT_IN_SINCOS):
10452 return fold_builtin_sincos (arg0, arg1, arg2);
10454 CASE_FLT_FN (BUILT_IN_FMA):
10455 if (validate_arg (arg0, REAL_TYPE)
10456 && validate_arg(arg1, REAL_TYPE)
10457 && validate_arg(arg2, REAL_TYPE))
10458 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10461 CASE_FLT_FN (BUILT_IN_REMQUO):
10462 if (validate_arg (arg0, REAL_TYPE)
10463 && validate_arg(arg1, REAL_TYPE)
10464 && validate_arg(arg2, POINTER_TYPE))
10465 return do_mpfr_remquo (arg0, arg1, arg2);
10468 case BUILT_IN_MEMSET:
10469 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10471 case BUILT_IN_BCOPY:
10472 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10474 case BUILT_IN_MEMCPY:
10475 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10477 case BUILT_IN_MEMPCPY:
10478 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10480 case BUILT_IN_MEMMOVE:
10481 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10483 case BUILT_IN_STRNCAT:
10484 return fold_builtin_strncat (arg0, arg1, arg2);
10486 case BUILT_IN_STRNCPY:
10487 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10489 case BUILT_IN_STRNCMP:
10490 return fold_builtin_strncmp (arg0, arg1, arg2);
10492 case BUILT_IN_MEMCHR:
10493 return fold_builtin_memchr (arg0, arg1, arg2, type);
10495 case BUILT_IN_BCMP:
10496 case BUILT_IN_MEMCMP:
10497 return fold_builtin_memcmp (arg0, arg1, arg2);;
10499 case BUILT_IN_SPRINTF:
10500 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10502 case BUILT_IN_STRCPY_CHK:
10503 case BUILT_IN_STPCPY_CHK:
10504 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10507 case BUILT_IN_STRCAT_CHK:
10508 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10510 case BUILT_IN_PRINTF_CHK:
10511 case BUILT_IN_VPRINTF_CHK:
10512 if (!validate_arg (arg0, INTEGER_TYPE)
10513 || TREE_SIDE_EFFECTS (arg0))
10516 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10519 case BUILT_IN_FPRINTF:
10520 case BUILT_IN_FPRINTF_UNLOCKED:
10521 case BUILT_IN_VFPRINTF:
10522 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10524 case BUILT_IN_FPRINTF_CHK:
10525 case BUILT_IN_VFPRINTF_CHK:
10526 if (!validate_arg (arg1, INTEGER_TYPE)
10527 || TREE_SIDE_EFFECTS (arg1))
10530 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10539 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10540 ARG2, and ARG3. IGNORE is true if the result of the function call is
10541 ignored. This function returns NULL_TREE if no simplification was
10545 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10548 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10552 case BUILT_IN_MEMCPY_CHK:
10553 case BUILT_IN_MEMPCPY_CHK:
10554 case BUILT_IN_MEMMOVE_CHK:
10555 case BUILT_IN_MEMSET_CHK:
10556 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10558 DECL_FUNCTION_CODE (fndecl));
10560 case BUILT_IN_STRNCPY_CHK:
10561 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10563 case BUILT_IN_STRNCAT_CHK:
10564 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10566 case BUILT_IN_FPRINTF_CHK:
10567 case BUILT_IN_VFPRINTF_CHK:
10568 if (!validate_arg (arg1, INTEGER_TYPE)
10569 || TREE_SIDE_EFFECTS (arg1))
10572 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10582 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10583 arguments, where NARGS <= 4. IGNORE is true if the result of the
10584 function call is ignored. This function returns NULL_TREE if no
10585 simplification was possible. Note that this only folds builtins with
10586 fixed argument patterns. Foldings that do varargs-to-varargs
10587 transformations, or that match calls with more than 4 arguments,
10588 need to be handled with fold_builtin_varargs instead. */
10590 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10593 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10595 tree ret = NULL_TREE;
10600 ret = fold_builtin_0 (fndecl, ignore);
10603 ret = fold_builtin_1 (fndecl, args[0], ignore);
10606 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10609 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10612 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10620 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10621 TREE_NO_WARNING (ret) = 1;
10627 /* Builtins with folding operations that operate on "..." arguments
10628 need special handling; we need to store the arguments in a convenient
10629 data structure before attempting any folding. Fortunately there are
10630 only a few builtins that fall into this category. FNDECL is the
10631 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10632 result of the function call is ignored. */
10635 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10637 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10638 tree ret = NULL_TREE;
10642 case BUILT_IN_SPRINTF_CHK:
10643 case BUILT_IN_VSPRINTF_CHK:
10644 ret = fold_builtin_sprintf_chk (exp, fcode);
10647 case BUILT_IN_SNPRINTF_CHK:
10648 case BUILT_IN_VSNPRINTF_CHK:
10649 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10652 case BUILT_IN_FPCLASSIFY:
10653 ret = fold_builtin_fpclassify (exp);
10661 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10662 TREE_NO_WARNING (ret) = 1;
10668 /* A wrapper function for builtin folding that prevents warnings for
10669 "statement without effect" and the like, caused by removing the
10670 call node earlier than the warning is generated. */
10673 fold_call_expr (tree exp, bool ignore)
10675 tree ret = NULL_TREE;
10676 tree fndecl = get_callee_fndecl (exp);
10678 && TREE_CODE (fndecl) == FUNCTION_DECL
10679 && DECL_BUILT_IN (fndecl)
10680 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10681 yet. Defer folding until we see all the arguments
10682 (after inlining). */
10683 && !CALL_EXPR_VA_ARG_PACK (exp))
10685 int nargs = call_expr_nargs (exp);
10687 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10688 instead last argument is __builtin_va_arg_pack (). Defer folding
10689 even in that case, until arguments are finalized. */
10690 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10692 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10694 && TREE_CODE (fndecl2) == FUNCTION_DECL
10695 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10696 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10700 /* FIXME: Don't use a list in this interface. */
10701 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10702 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10705 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10707 tree *args = CALL_EXPR_ARGP (exp);
10708 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10711 ret = fold_builtin_varargs (fndecl, exp, ignore);
10714 /* Propagate location information from original call to
10715 expansion of builtin. Otherwise things like
10716 maybe_emit_chk_warning, that operate on the expansion
10717 of a builtin, will use the wrong location information. */
10718 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10720 tree realret = ret;
10721 if (TREE_CODE (ret) == NOP_EXPR)
10722 realret = TREE_OPERAND (ret, 0);
10723 if (CAN_HAVE_LOCATION_P (realret)
10724 && !EXPR_HAS_LOCATION (realret))
10725 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10735 /* Conveniently construct a function call expression. FNDECL names the
10736 function to be called and ARGLIST is a TREE_LIST of arguments. */
10739 build_function_call_expr (tree fndecl, tree arglist)
10741 tree fntype = TREE_TYPE (fndecl);
10742 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10743 int n = list_length (arglist);
10744 tree *argarray = (tree *) alloca (n * sizeof (tree));
10747 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10748 argarray[i] = TREE_VALUE (arglist);
10749 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10752 /* Conveniently construct a function call expression. FNDECL names the
10753 function to be called, N is the number of arguments, and the "..."
10754 parameters are the argument expressions. */
10757 build_call_expr (tree fndecl, int n, ...)
10760 tree fntype = TREE_TYPE (fndecl);
10761 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10762 tree *argarray = (tree *) alloca (n * sizeof (tree));
10766 for (i = 0; i < n; i++)
10767 argarray[i] = va_arg (ap, tree);
10769 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10772 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10773 N arguments are passed in the array ARGARRAY. */
10776 fold_builtin_call_array (tree type,
10781 tree ret = NULL_TREE;
10785 if (TREE_CODE (fn) == ADDR_EXPR)
10787 tree fndecl = TREE_OPERAND (fn, 0);
10788 if (TREE_CODE (fndecl) == FUNCTION_DECL
10789 && DECL_BUILT_IN (fndecl))
10791 /* If last argument is __builtin_va_arg_pack (), arguments to this
10792 function are not finalized yet. Defer folding until they are. */
10793 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10795 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10797 && TREE_CODE (fndecl2) == FUNCTION_DECL
10798 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10799 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10800 return build_call_array (type, fn, n, argarray);
10802 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10804 tree arglist = NULL_TREE;
10805 for (i = n - 1; i >= 0; i--)
10806 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10807 ret = targetm.fold_builtin (fndecl, arglist, false);
10811 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10813 /* First try the transformations that don't require consing up
10815 ret = fold_builtin_n (fndecl, argarray, n, false);
10820 /* If we got this far, we need to build an exp. */
10821 exp = build_call_array (type, fn, n, argarray);
10822 ret = fold_builtin_varargs (fndecl, exp, false);
10823 return ret ? ret : exp;
10827 return build_call_array (type, fn, n, argarray);
10830 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10831 along with N new arguments specified as the "..." parameters. SKIP
10832 is the number of arguments in EXP to be omitted. This function is used
10833 to do varargs-to-varargs transformations. */
10836 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10838 int oldnargs = call_expr_nargs (exp);
10839 int nargs = oldnargs - skip + n;
10840 tree fntype = TREE_TYPE (fndecl);
10841 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10849 buffer = XALLOCAVEC (tree, nargs);
10851 for (i = 0; i < n; i++)
10852 buffer[i] = va_arg (ap, tree);
10854 for (j = skip; j < oldnargs; j++, i++)
10855 buffer[i] = CALL_EXPR_ARG (exp, j);
10858 buffer = CALL_EXPR_ARGP (exp) + skip;
10860 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10863 /* Validate a single argument ARG against a tree code CODE representing
10867 validate_arg (const_tree arg, enum tree_code code)
10871 else if (code == POINTER_TYPE)
10872 return POINTER_TYPE_P (TREE_TYPE (arg));
10873 else if (code == INTEGER_TYPE)
10874 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10875 return code == TREE_CODE (TREE_TYPE (arg));
10878 /* This function validates the types of a function call argument list
10879 against a specified list of tree_codes. If the last specifier is a 0,
10880 that represents an ellipses, otherwise the last specifier must be a
10883 This is the GIMPLE version of validate_arglist. Eventually we want to
10884 completely convert builtins.c to work from GIMPLEs and the tree based
10885 validate_arglist will then be removed. */
10888 validate_gimple_arglist (const_gimple call, ...)
10890 enum tree_code code;
10896 va_start (ap, call);
10901 code = va_arg (ap, enum tree_code);
10905 /* This signifies an ellipses, any further arguments are all ok. */
10909 /* This signifies an endlink, if no arguments remain, return
10910 true, otherwise return false. */
10911 res = (i == gimple_call_num_args (call));
10914 /* If no parameters remain or the parameter's code does not
10915 match the specified code, return false. Otherwise continue
10916 checking any remaining arguments. */
10917 arg = gimple_call_arg (call, i++);
10918 if (!validate_arg (arg, code))
10925 /* We need gotos here since we can only have one VA_CLOSE in a
10933 /* This function validates the types of a function call argument list
10934 against a specified list of tree_codes. If the last specifier is a 0,
10935 that represents an ellipses, otherwise the last specifier must be a
10939 validate_arglist (const_tree callexpr, ...)
10941 enum tree_code code;
10944 const_call_expr_arg_iterator iter;
10947 va_start (ap, callexpr);
10948 init_const_call_expr_arg_iterator (callexpr, &iter);
10952 code = va_arg (ap, enum tree_code);
10956 /* This signifies an ellipses, any further arguments are all ok. */
10960 /* This signifies an endlink, if no arguments remain, return
10961 true, otherwise return false. */
10962 res = !more_const_call_expr_args_p (&iter);
10965 /* If no parameters remain or the parameter's code does not
10966 match the specified code, return false. Otherwise continue
10967 checking any remaining arguments. */
10968 arg = next_const_call_expr_arg (&iter);
10969 if (!validate_arg (arg, code))
10976 /* We need gotos here since we can only have one VA_CLOSE in a
10984 /* Default target-specific builtin expander that does nothing. */
10987 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10988 rtx target ATTRIBUTE_UNUSED,
10989 rtx subtarget ATTRIBUTE_UNUSED,
10990 enum machine_mode mode ATTRIBUTE_UNUSED,
10991 int ignore ATTRIBUTE_UNUSED)
10996 /* Returns true is EXP represents data that would potentially reside
10997 in a readonly section. */
11000 readonly_data_expr (tree exp)
11004 if (TREE_CODE (exp) != ADDR_EXPR)
11007 exp = get_base_address (TREE_OPERAND (exp, 0));
11011 /* Make sure we call decl_readonly_section only for trees it
11012 can handle (since it returns true for everything it doesn't
11014 if (TREE_CODE (exp) == STRING_CST
11015 || TREE_CODE (exp) == CONSTRUCTOR
11016 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11017 return decl_readonly_section (exp, 0);
11022 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11023 to the call, and TYPE is its return type.
11025 Return NULL_TREE if no simplification was possible, otherwise return the
11026 simplified form of the call as a tree.
11028 The simplified form may be a constant or other expression which
11029 computes the same value, but in a more efficient manner (including
11030 calls to other builtin functions).
11032 The call may contain arguments which need to be evaluated, but
11033 which are not useful to determine the result of the call. In
11034 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11035 COMPOUND_EXPR will be an argument which must be evaluated.
11036 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11037 COMPOUND_EXPR in the chain will contain the tree for the simplified
11038 form of the builtin function call. */
11041 fold_builtin_strstr (tree s1, tree s2, tree type)
11043 if (!validate_arg (s1, POINTER_TYPE)
11044 || !validate_arg (s2, POINTER_TYPE))
11049 const char *p1, *p2;
11051 p2 = c_getstr (s2);
11055 p1 = c_getstr (s1);
11058 const char *r = strstr (p1, p2);
11062 return build_int_cst (TREE_TYPE (s1), 0);
11064 /* Return an offset into the constant string argument. */
11065 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11066 s1, size_int (r - p1));
11067 return fold_convert (type, tem);
11070 /* The argument is const char *, and the result is char *, so we need
11071 a type conversion here to avoid a warning. */
11073 return fold_convert (type, s1);
11078 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11082 /* New argument list transforming strstr(s1, s2) to
11083 strchr(s1, s2[0]). */
11084 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11088 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11089 the call, and TYPE is its return type.
11091 Return NULL_TREE if no simplification was possible, otherwise return the
11092 simplified form of the call as a tree.
11094 The simplified form may be a constant or other expression which
11095 computes the same value, but in a more efficient manner (including
11096 calls to other builtin functions).
11098 The call may contain arguments which need to be evaluated, but
11099 which are not useful to determine the result of the call. In
11100 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11101 COMPOUND_EXPR will be an argument which must be evaluated.
11102 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11103 COMPOUND_EXPR in the chain will contain the tree for the simplified
11104 form of the builtin function call. */
11107 fold_builtin_strchr (tree s1, tree s2, tree type)
11109 if (!validate_arg (s1, POINTER_TYPE)
11110 || !validate_arg (s2, INTEGER_TYPE))
11116 if (TREE_CODE (s2) != INTEGER_CST)
11119 p1 = c_getstr (s1);
11126 if (target_char_cast (s2, &c))
11129 r = strchr (p1, c);
11132 return build_int_cst (TREE_TYPE (s1), 0);
11134 /* Return an offset into the constant string argument. */
11135 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11136 s1, size_int (r - p1));
11137 return fold_convert (type, tem);
11143 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11144 the call, and TYPE is its return type.
11146 Return NULL_TREE if no simplification was possible, otherwise return the
11147 simplified form of the call as a tree.
11149 The simplified form may be a constant or other expression which
11150 computes the same value, but in a more efficient manner (including
11151 calls to other builtin functions).
11153 The call may contain arguments which need to be evaluated, but
11154 which are not useful to determine the result of the call. In
11155 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11156 COMPOUND_EXPR will be an argument which must be evaluated.
11157 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11158 COMPOUND_EXPR in the chain will contain the tree for the simplified
11159 form of the builtin function call. */
11162 fold_builtin_strrchr (tree s1, tree s2, tree type)
11164 if (!validate_arg (s1, POINTER_TYPE)
11165 || !validate_arg (s2, INTEGER_TYPE))
11172 if (TREE_CODE (s2) != INTEGER_CST)
11175 p1 = c_getstr (s1);
11182 if (target_char_cast (s2, &c))
11185 r = strrchr (p1, c);
11188 return build_int_cst (TREE_TYPE (s1), 0);
11190 /* Return an offset into the constant string argument. */
11191 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11192 s1, size_int (r - p1));
11193 return fold_convert (type, tem);
11196 if (! integer_zerop (s2))
11199 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11203 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11204 return build_call_expr (fn, 2, s1, s2);
11208 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11209 to the call, and TYPE is its return type.
11211 Return NULL_TREE if no simplification was possible, otherwise return the
11212 simplified form of the call as a tree.
11214 The simplified form may be a constant or other expression which
11215 computes the same value, but in a more efficient manner (including
11216 calls to other builtin functions).
11218 The call may contain arguments which need to be evaluated, but
11219 which are not useful to determine the result of the call. In
11220 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11221 COMPOUND_EXPR will be an argument which must be evaluated.
11222 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11223 COMPOUND_EXPR in the chain will contain the tree for the simplified
11224 form of the builtin function call. */
11227 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11229 if (!validate_arg (s1, POINTER_TYPE)
11230 || !validate_arg (s2, POINTER_TYPE))
11235 const char *p1, *p2;
11237 p2 = c_getstr (s2);
11241 p1 = c_getstr (s1);
11244 const char *r = strpbrk (p1, p2);
11248 return build_int_cst (TREE_TYPE (s1), 0);
11250 /* Return an offset into the constant string argument. */
11251 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11252 s1, size_int (r - p1));
11253 return fold_convert (type, tem);
11257 /* strpbrk(x, "") == NULL.
11258 Evaluate and ignore s1 in case it had side-effects. */
11259 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11262 return NULL_TREE; /* Really call strpbrk. */
11264 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11268 /* New argument list transforming strpbrk(s1, s2) to
11269 strchr(s1, s2[0]). */
11270 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11274 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11277 Return NULL_TREE if no simplification was possible, otherwise return the
11278 simplified form of the call as a tree.
11280 The simplified form may be a constant or other expression which
11281 computes the same value, but in a more efficient manner (including
11282 calls to other builtin functions).
11284 The call may contain arguments which need to be evaluated, but
11285 which are not useful to determine the result of the call. In
11286 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11287 COMPOUND_EXPR will be an argument which must be evaluated.
11288 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11289 COMPOUND_EXPR in the chain will contain the tree for the simplified
11290 form of the builtin function call. */
11293 fold_builtin_strcat (tree dst, tree src)
11295 if (!validate_arg (dst, POINTER_TYPE)
11296 || !validate_arg (src, POINTER_TYPE))
11300 const char *p = c_getstr (src);
11302 /* If the string length is zero, return the dst parameter. */
11303 if (p && *p == '\0')
11310 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11311 arguments to the call.
11313 Return NULL_TREE if no simplification was possible, otherwise return the
11314 simplified form of the call as a tree.
11316 The simplified form may be a constant or other expression which
11317 computes the same value, but in a more efficient manner (including
11318 calls to other builtin functions).
11320 The call may contain arguments which need to be evaluated, but
11321 which are not useful to determine the result of the call. In
11322 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11323 COMPOUND_EXPR will be an argument which must be evaluated.
11324 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11325 COMPOUND_EXPR in the chain will contain the tree for the simplified
11326 form of the builtin function call. */
11329 fold_builtin_strncat (tree dst, tree src, tree len)
11331 if (!validate_arg (dst, POINTER_TYPE)
11332 || !validate_arg (src, POINTER_TYPE)
11333 || !validate_arg (len, INTEGER_TYPE))
11337 const char *p = c_getstr (src);
11339 /* If the requested length is zero, or the src parameter string
11340 length is zero, return the dst parameter. */
11341 if (integer_zerop (len) || (p && *p == '\0'))
11342 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11344 /* If the requested len is greater than or equal to the string
11345 length, call strcat. */
11346 if (TREE_CODE (len) == INTEGER_CST && p
11347 && compare_tree_int (len, strlen (p)) >= 0)
11349 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11351 /* If the replacement _DECL isn't initialized, don't do the
11356 return build_call_expr (fn, 2, dst, src);
11362 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11365 Return NULL_TREE if no simplification was possible, otherwise return the
11366 simplified form of the call as a tree.
11368 The simplified form may be a constant or other expression which
11369 computes the same value, but in a more efficient manner (including
11370 calls to other builtin functions).
11372 The call may contain arguments which need to be evaluated, but
11373 which are not useful to determine the result of the call. In
11374 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11375 COMPOUND_EXPR will be an argument which must be evaluated.
11376 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11377 COMPOUND_EXPR in the chain will contain the tree for the simplified
11378 form of the builtin function call. */
11381 fold_builtin_strspn (tree s1, tree s2)
11383 if (!validate_arg (s1, POINTER_TYPE)
11384 || !validate_arg (s2, POINTER_TYPE))
11388 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11390 /* If both arguments are constants, evaluate at compile-time. */
11393 const size_t r = strspn (p1, p2);
11394 return size_int (r);
11397 /* If either argument is "", return NULL_TREE. */
11398 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11399 /* Evaluate and ignore both arguments in case either one has
11401 return omit_two_operands (integer_type_node, integer_zero_node,
11407 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11410 Return NULL_TREE if no simplification was possible, otherwise return the
11411 simplified form of the call as a tree.
11413 The simplified form may be a constant or other expression which
11414 computes the same value, but in a more efficient manner (including
11415 calls to other builtin functions).
11417 The call may contain arguments which need to be evaluated, but
11418 which are not useful to determine the result of the call. In
11419 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11420 COMPOUND_EXPR will be an argument which must be evaluated.
11421 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11422 COMPOUND_EXPR in the chain will contain the tree for the simplified
11423 form of the builtin function call. */
11426 fold_builtin_strcspn (tree s1, tree s2)
11428 if (!validate_arg (s1, POINTER_TYPE)
11429 || !validate_arg (s2, POINTER_TYPE))
11433 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11435 /* If both arguments are constants, evaluate at compile-time. */
11438 const size_t r = strcspn (p1, p2);
11439 return size_int (r);
11442 /* If the first argument is "", return NULL_TREE. */
11443 if (p1 && *p1 == '\0')
11445 /* Evaluate and ignore argument s2 in case it has
11447 return omit_one_operand (integer_type_node,
11448 integer_zero_node, s2);
11451 /* If the second argument is "", return __builtin_strlen(s1). */
11452 if (p2 && *p2 == '\0')
11454 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11456 /* If the replacement _DECL isn't initialized, don't do the
11461 return build_call_expr (fn, 1, s1);
11467 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11468 to the call. IGNORE is true if the value returned
11469 by the builtin will be ignored. UNLOCKED is true is true if this
11470 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11471 the known length of the string. Return NULL_TREE if no simplification
11475 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11477 /* If we're using an unlocked function, assume the other unlocked
11478 functions exist explicitly. */
11479 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11480 : implicit_built_in_decls[BUILT_IN_FPUTC];
11481 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11482 : implicit_built_in_decls[BUILT_IN_FWRITE];
11484 /* If the return value is used, don't do the transformation. */
11488 /* Verify the arguments in the original call. */
11489 if (!validate_arg (arg0, POINTER_TYPE)
11490 || !validate_arg (arg1, POINTER_TYPE))
11494 len = c_strlen (arg0, 0);
11496 /* Get the length of the string passed to fputs. If the length
11497 can't be determined, punt. */
11499 || TREE_CODE (len) != INTEGER_CST)
11502 switch (compare_tree_int (len, 1))
11504 case -1: /* length is 0, delete the call entirely . */
11505 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11507 case 0: /* length is 1, call fputc. */
11509 const char *p = c_getstr (arg0);
11514 return build_call_expr (fn_fputc, 2,
11515 build_int_cst (NULL_TREE, p[0]), arg1);
11521 case 1: /* length is greater than 1, call fwrite. */
11523 /* If optimizing for size keep fputs. */
11524 if (optimize_function_for_size_p (cfun))
11526 /* New argument list transforming fputs(string, stream) to
11527 fwrite(string, 1, len, stream). */
11529 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11534 gcc_unreachable ();
11539 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11540 produced. False otherwise. This is done so that we don't output the error
11541 or warning twice or three times. */
11544 fold_builtin_next_arg (tree exp, bool va_start_p)
11546 tree fntype = TREE_TYPE (current_function_decl);
11547 int nargs = call_expr_nargs (exp);
11550 if (TYPE_ARG_TYPES (fntype) == 0
11551 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11552 == void_type_node))
11554 error ("%<va_start%> used in function with fixed args");
11560 if (va_start_p && (nargs != 2))
11562 error ("wrong number of arguments to function %<va_start%>");
11565 arg = CALL_EXPR_ARG (exp, 1);
11567 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11568 when we checked the arguments and if needed issued a warning. */
11573 /* Evidently an out of date version of <stdarg.h>; can't validate
11574 va_start's second argument, but can still work as intended. */
11575 warning (0, "%<__builtin_next_arg%> called without an argument");
11578 else if (nargs > 1)
11580 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11583 arg = CALL_EXPR_ARG (exp, 0);
11586 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11587 or __builtin_next_arg (0) the first time we see it, after checking
11588 the arguments and if needed issuing a warning. */
11589 if (!integer_zerop (arg))
11591 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11593 /* Strip off all nops for the sake of the comparison. This
11594 is not quite the same as STRIP_NOPS. It does more.
11595 We must also strip off INDIRECT_EXPR for C++ reference
11597 while (CONVERT_EXPR_P (arg)
11598 || TREE_CODE (arg) == INDIRECT_REF)
11599 arg = TREE_OPERAND (arg, 0);
11600 if (arg != last_parm)
11602 /* FIXME: Sometimes with the tree optimizers we can get the
11603 not the last argument even though the user used the last
11604 argument. We just warn and set the arg to be the last
11605 argument so that we will get wrong-code because of
11607 warning (0, "second parameter of %<va_start%> not last named argument");
11610 /* Undefined by C99 7.15.1.4p4 (va_start):
11611 "If the parameter parmN is declared with the register storage
11612 class, with a function or array type, or with a type that is
11613 not compatible with the type that results after application of
11614 the default argument promotions, the behavior is undefined."
11616 else if (DECL_REGISTER (arg))
11617 warning (0, "undefined behaviour when second parameter of "
11618 "%<va_start%> is declared with %<register%> storage");
11620 /* We want to verify the second parameter just once before the tree
11621 optimizers are run and then avoid keeping it in the tree,
11622 as otherwise we could warn even for correct code like:
11623 void foo (int i, ...)
11624 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11626 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11628 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11634 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11635 ORIG may be null if this is a 2-argument call. We don't attempt to
11636 simplify calls with more than 3 arguments.
11638 Return NULL_TREE if no simplification was possible, otherwise return the
11639 simplified form of the call as a tree. If IGNORED is true, it means that
11640 the caller does not use the returned value of the function. */
11643 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11646 const char *fmt_str = NULL;
11648 /* Verify the required arguments in the original call. We deal with two
11649 types of sprintf() calls: 'sprintf (str, fmt)' and
11650 'sprintf (dest, "%s", orig)'. */
11651 if (!validate_arg (dest, POINTER_TYPE)
11652 || !validate_arg (fmt, POINTER_TYPE))
11654 if (orig && !validate_arg (orig, POINTER_TYPE))
11657 /* Check whether the format is a literal string constant. */
11658 fmt_str = c_getstr (fmt);
11659 if (fmt_str == NULL)
11663 retval = NULL_TREE;
11665 if (!init_target_chars ())
11668 /* If the format doesn't contain % args or %%, use strcpy. */
11669 if (strchr (fmt_str, target_percent) == NULL)
11671 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11676 /* Don't optimize sprintf (buf, "abc", ptr++). */
11680 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11681 'format' is known to contain no % formats. */
11682 call = build_call_expr (fn, 2, dest, fmt);
11684 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11687 /* If the format is "%s", use strcpy if the result isn't used. */
11688 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11691 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11696 /* Don't crash on sprintf (str1, "%s"). */
11700 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11703 retval = c_strlen (orig, 1);
11704 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11707 call = build_call_expr (fn, 2, dest, orig);
11710 if (call && retval)
11712 retval = fold_convert
11713 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11715 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11721 /* Expand a call EXP to __builtin_object_size. */
11724 expand_builtin_object_size (tree exp)
11727 int object_size_type;
11728 tree fndecl = get_callee_fndecl (exp);
11730 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11732 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11734 expand_builtin_trap ();
11738 ost = CALL_EXPR_ARG (exp, 1);
11741 if (TREE_CODE (ost) != INTEGER_CST
11742 || tree_int_cst_sgn (ost) < 0
11743 || compare_tree_int (ost, 3) > 0)
11745 error ("%Klast argument of %D is not integer constant between 0 and 3",
11747 expand_builtin_trap ();
11751 object_size_type = tree_low_cst (ost, 0);
11753 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11756 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11757 FCODE is the BUILT_IN_* to use.
11758 Return NULL_RTX if we failed; the caller should emit a normal call,
11759 otherwise try to get the result in TARGET, if convenient (and in
11760 mode MODE if that's convenient). */
11763 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11764 enum built_in_function fcode)
11766 tree dest, src, len, size;
11768 if (!validate_arglist (exp,
11770 fcode == BUILT_IN_MEMSET_CHK
11771 ? INTEGER_TYPE : POINTER_TYPE,
11772 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11775 dest = CALL_EXPR_ARG (exp, 0);
11776 src = CALL_EXPR_ARG (exp, 1);
11777 len = CALL_EXPR_ARG (exp, 2);
11778 size = CALL_EXPR_ARG (exp, 3);
11780 if (! host_integerp (size, 1))
11783 if (host_integerp (len, 1) || integer_all_onesp (size))
11787 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11789 warning (0, "%Kcall to %D will always overflow destination buffer",
11790 exp, get_callee_fndecl (exp));
11795 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11796 mem{cpy,pcpy,move,set} is available. */
11799 case BUILT_IN_MEMCPY_CHK:
11800 fn = built_in_decls[BUILT_IN_MEMCPY];
11802 case BUILT_IN_MEMPCPY_CHK:
11803 fn = built_in_decls[BUILT_IN_MEMPCPY];
11805 case BUILT_IN_MEMMOVE_CHK:
11806 fn = built_in_decls[BUILT_IN_MEMMOVE];
11808 case BUILT_IN_MEMSET_CHK:
11809 fn = built_in_decls[BUILT_IN_MEMSET];
11818 fn = build_call_expr (fn, 3, dest, src, len);
11819 STRIP_TYPE_NOPS (fn);
11820 while (TREE_CODE (fn) == COMPOUND_EXPR)
11822 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11824 fn = TREE_OPERAND (fn, 1);
11826 if (TREE_CODE (fn) == CALL_EXPR)
11827 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11828 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11830 else if (fcode == BUILT_IN_MEMSET_CHK)
11834 unsigned int dest_align
11835 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11837 /* If DEST is not a pointer type, call the normal function. */
11838 if (dest_align == 0)
11841 /* If SRC and DEST are the same (and not volatile), do nothing. */
11842 if (operand_equal_p (src, dest, 0))
11846 if (fcode != BUILT_IN_MEMPCPY_CHK)
11848 /* Evaluate and ignore LEN in case it has side-effects. */
11849 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11850 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11853 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11854 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11857 /* __memmove_chk special case. */
11858 if (fcode == BUILT_IN_MEMMOVE_CHK)
11860 unsigned int src_align
11861 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11863 if (src_align == 0)
11866 /* If src is categorized for a readonly section we can use
11867 normal __memcpy_chk. */
11868 if (readonly_data_expr (src))
11870 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11873 fn = build_call_expr (fn, 4, dest, src, len, size);
11874 STRIP_TYPE_NOPS (fn);
11875 while (TREE_CODE (fn) == COMPOUND_EXPR)
11877 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11879 fn = TREE_OPERAND (fn, 1);
11881 if (TREE_CODE (fn) == CALL_EXPR)
11882 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11883 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11890 /* Emit warning if a buffer overflow is detected at compile time. */
11893 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11900 case BUILT_IN_STRCPY_CHK:
11901 case BUILT_IN_STPCPY_CHK:
11902 /* For __strcat_chk the warning will be emitted only if overflowing
11903 by at least strlen (dest) + 1 bytes. */
11904 case BUILT_IN_STRCAT_CHK:
11905 len = CALL_EXPR_ARG (exp, 1);
11906 size = CALL_EXPR_ARG (exp, 2);
11909 case BUILT_IN_STRNCAT_CHK:
11910 case BUILT_IN_STRNCPY_CHK:
11911 len = CALL_EXPR_ARG (exp, 2);
11912 size = CALL_EXPR_ARG (exp, 3);
11914 case BUILT_IN_SNPRINTF_CHK:
11915 case BUILT_IN_VSNPRINTF_CHK:
11916 len = CALL_EXPR_ARG (exp, 1);
11917 size = CALL_EXPR_ARG (exp, 3);
11920 gcc_unreachable ();
11926 if (! host_integerp (size, 1) || integer_all_onesp (size))
11931 len = c_strlen (len, 1);
11932 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11935 else if (fcode == BUILT_IN_STRNCAT_CHK)
11937 tree src = CALL_EXPR_ARG (exp, 1);
11938 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11940 src = c_strlen (src, 1);
11941 if (! src || ! host_integerp (src, 1))
11943 warning (0, "%Kcall to %D might overflow destination buffer",
11944 exp, get_callee_fndecl (exp));
11947 else if (tree_int_cst_lt (src, size))
11950 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11953 warning (0, "%Kcall to %D will always overflow destination buffer",
11954 exp, get_callee_fndecl (exp));
11957 /* Emit warning if a buffer overflow is detected at compile time
11958 in __sprintf_chk/__vsprintf_chk calls. */
11961 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11963 tree dest, size, len, fmt, flag;
11964 const char *fmt_str;
11965 int nargs = call_expr_nargs (exp);
11967 /* Verify the required arguments in the original call. */
11971 dest = CALL_EXPR_ARG (exp, 0);
11972 flag = CALL_EXPR_ARG (exp, 1);
11973 size = CALL_EXPR_ARG (exp, 2);
11974 fmt = CALL_EXPR_ARG (exp, 3);
11976 if (! host_integerp (size, 1) || integer_all_onesp (size))
11979 /* Check whether the format is a literal string constant. */
11980 fmt_str = c_getstr (fmt);
11981 if (fmt_str == NULL)
11984 if (!init_target_chars ())
11987 /* If the format doesn't contain % args or %%, we know its size. */
11988 if (strchr (fmt_str, target_percent) == 0)
11989 len = build_int_cstu (size_type_node, strlen (fmt_str));
11990 /* If the format is "%s" and first ... argument is a string literal,
11992 else if (fcode == BUILT_IN_SPRINTF_CHK
11993 && strcmp (fmt_str, target_percent_s) == 0)
11999 arg = CALL_EXPR_ARG (exp, 4);
12000 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12003 len = c_strlen (arg, 1);
12004 if (!len || ! host_integerp (len, 1))
12010 if (! tree_int_cst_lt (len, size))
12012 warning (0, "%Kcall to %D will always overflow destination buffer",
12013 exp, get_callee_fndecl (exp));
12017 /* Emit warning if a free is called with address of a variable. */
12020 maybe_emit_free_warning (tree exp)
12022 tree arg = CALL_EXPR_ARG (exp, 0);
12025 if (TREE_CODE (arg) != ADDR_EXPR)
12028 arg = get_base_address (TREE_OPERAND (arg, 0));
12029 if (arg == NULL || INDIRECT_REF_P (arg))
12032 if (SSA_VAR_P (arg))
12033 warning (0, "%Kattempt to free a non-heap object %qD", exp, arg);
12035 warning (0, "%Kattempt to free a non-heap object", exp);
12038 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12042 fold_builtin_object_size (tree ptr, tree ost)
12044 tree ret = NULL_TREE;
12045 int object_size_type;
12047 if (!validate_arg (ptr, POINTER_TYPE)
12048 || !validate_arg (ost, INTEGER_TYPE))
12053 if (TREE_CODE (ost) != INTEGER_CST
12054 || tree_int_cst_sgn (ost) < 0
12055 || compare_tree_int (ost, 3) > 0)
12058 object_size_type = tree_low_cst (ost, 0);
12060 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12061 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12062 and (size_t) 0 for types 2 and 3. */
12063 if (TREE_SIDE_EFFECTS (ptr))
12064 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12066 if (TREE_CODE (ptr) == ADDR_EXPR)
12067 ret = build_int_cstu (size_type_node,
12068 compute_builtin_object_size (ptr, object_size_type));
12070 else if (TREE_CODE (ptr) == SSA_NAME)
12072 unsigned HOST_WIDE_INT bytes;
12074 /* If object size is not known yet, delay folding until
12075 later. Maybe subsequent passes will help determining
12077 bytes = compute_builtin_object_size (ptr, object_size_type);
12078 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12080 ret = build_int_cstu (size_type_node, bytes);
12085 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12086 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12087 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12094 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12095 DEST, SRC, LEN, and SIZE are the arguments to the call.
12096 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12097 code of the builtin. If MAXLEN is not NULL, it is maximum length
12098 passed as third argument. */
12101 fold_builtin_memory_chk (tree fndecl,
12102 tree dest, tree src, tree len, tree size,
12103 tree maxlen, bool ignore,
12104 enum built_in_function fcode)
12108 if (!validate_arg (dest, POINTER_TYPE)
12109 || !validate_arg (src,
12110 (fcode == BUILT_IN_MEMSET_CHK
12111 ? INTEGER_TYPE : POINTER_TYPE))
12112 || !validate_arg (len, INTEGER_TYPE)
12113 || !validate_arg (size, INTEGER_TYPE))
12116 /* If SRC and DEST are the same (and not volatile), return DEST
12117 (resp. DEST+LEN for __mempcpy_chk). */
12118 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12120 if (fcode != BUILT_IN_MEMPCPY_CHK)
12121 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12124 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12125 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
12129 if (! host_integerp (size, 1))
12132 if (! integer_all_onesp (size))
12134 if (! host_integerp (len, 1))
12136 /* If LEN is not constant, try MAXLEN too.
12137 For MAXLEN only allow optimizing into non-_ocs function
12138 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12139 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12141 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12143 /* (void) __mempcpy_chk () can be optimized into
12144 (void) __memcpy_chk (). */
12145 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12149 return build_call_expr (fn, 4, dest, src, len, size);
12157 if (tree_int_cst_lt (size, maxlen))
12162 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12163 mem{cpy,pcpy,move,set} is available. */
12166 case BUILT_IN_MEMCPY_CHK:
12167 fn = built_in_decls[BUILT_IN_MEMCPY];
12169 case BUILT_IN_MEMPCPY_CHK:
12170 fn = built_in_decls[BUILT_IN_MEMPCPY];
12172 case BUILT_IN_MEMMOVE_CHK:
12173 fn = built_in_decls[BUILT_IN_MEMMOVE];
12175 case BUILT_IN_MEMSET_CHK:
12176 fn = built_in_decls[BUILT_IN_MEMSET];
12185 return build_call_expr (fn, 3, dest, src, len);
12188 /* Fold a call to the __st[rp]cpy_chk builtin.
12189 DEST, SRC, and SIZE are the arguments to the call.
12190 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12191 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12192 strings passed as second argument. */
12195 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12196 tree maxlen, bool ignore,
12197 enum built_in_function fcode)
12201 if (!validate_arg (dest, POINTER_TYPE)
12202 || !validate_arg (src, POINTER_TYPE)
12203 || !validate_arg (size, INTEGER_TYPE))
12206 /* If SRC and DEST are the same (and not volatile), return DEST. */
12207 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12208 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12210 if (! host_integerp (size, 1))
12213 if (! integer_all_onesp (size))
12215 len = c_strlen (src, 1);
12216 if (! len || ! host_integerp (len, 1))
12218 /* If LEN is not constant, try MAXLEN too.
12219 For MAXLEN only allow optimizing into non-_ocs function
12220 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12221 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12223 if (fcode == BUILT_IN_STPCPY_CHK)
12228 /* If return value of __stpcpy_chk is ignored,
12229 optimize into __strcpy_chk. */
12230 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12234 return build_call_expr (fn, 3, dest, src, size);
12237 if (! len || TREE_SIDE_EFFECTS (len))
12240 /* If c_strlen returned something, but not a constant,
12241 transform __strcpy_chk into __memcpy_chk. */
12242 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12246 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12247 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12248 build_call_expr (fn, 4,
12249 dest, src, len, size));
12255 if (! tree_int_cst_lt (maxlen, size))
12259 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12260 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12261 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12265 return build_call_expr (fn, 2, dest, src);
12268 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12269 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12270 length passed as third argument. */
12273 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12278 if (!validate_arg (dest, POINTER_TYPE)
12279 || !validate_arg (src, POINTER_TYPE)
12280 || !validate_arg (len, INTEGER_TYPE)
12281 || !validate_arg (size, INTEGER_TYPE))
12284 if (! host_integerp (size, 1))
12287 if (! integer_all_onesp (size))
12289 if (! host_integerp (len, 1))
12291 /* If LEN is not constant, try MAXLEN too.
12292 For MAXLEN only allow optimizing into non-_ocs function
12293 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12294 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12300 if (tree_int_cst_lt (size, maxlen))
12304 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12305 fn = built_in_decls[BUILT_IN_STRNCPY];
12309 return build_call_expr (fn, 3, dest, src, len);
12312 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12313 are the arguments to the call. */
12316 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12321 if (!validate_arg (dest, POINTER_TYPE)
12322 || !validate_arg (src, POINTER_TYPE)
12323 || !validate_arg (size, INTEGER_TYPE))
12326 p = c_getstr (src);
12327 /* If the SRC parameter is "", return DEST. */
12328 if (p && *p == '\0')
12329 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12331 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12334 /* If __builtin_strcat_chk is used, assume strcat is available. */
12335 fn = built_in_decls[BUILT_IN_STRCAT];
12339 return build_call_expr (fn, 2, dest, src);
12342 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12346 fold_builtin_strncat_chk (tree fndecl,
12347 tree dest, tree src, tree len, tree size)
12352 if (!validate_arg (dest, POINTER_TYPE)
12353 || !validate_arg (src, POINTER_TYPE)
12354 || !validate_arg (size, INTEGER_TYPE)
12355 || !validate_arg (size, INTEGER_TYPE))
12358 p = c_getstr (src);
12359 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12360 if (p && *p == '\0')
12361 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12362 else if (integer_zerop (len))
12363 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12365 if (! host_integerp (size, 1))
12368 if (! integer_all_onesp (size))
12370 tree src_len = c_strlen (src, 1);
12372 && host_integerp (src_len, 1)
12373 && host_integerp (len, 1)
12374 && ! tree_int_cst_lt (len, src_len))
12376 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12377 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12381 return build_call_expr (fn, 3, dest, src, size);
12386 /* If __builtin_strncat_chk is used, assume strncat is available. */
12387 fn = built_in_decls[BUILT_IN_STRNCAT];
12391 return build_call_expr (fn, 3, dest, src, len);
12394 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12395 a normal call should be emitted rather than expanding the function
12396 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12399 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12401 tree dest, size, len, fn, fmt, flag;
12402 const char *fmt_str;
12403 int nargs = call_expr_nargs (exp);
12405 /* Verify the required arguments in the original call. */
12408 dest = CALL_EXPR_ARG (exp, 0);
12409 if (!validate_arg (dest, POINTER_TYPE))
12411 flag = CALL_EXPR_ARG (exp, 1);
12412 if (!validate_arg (flag, INTEGER_TYPE))
12414 size = CALL_EXPR_ARG (exp, 2);
12415 if (!validate_arg (size, INTEGER_TYPE))
12417 fmt = CALL_EXPR_ARG (exp, 3);
12418 if (!validate_arg (fmt, POINTER_TYPE))
12421 if (! host_integerp (size, 1))
12426 if (!init_target_chars ())
12429 /* Check whether the format is a literal string constant. */
12430 fmt_str = c_getstr (fmt);
12431 if (fmt_str != NULL)
12433 /* If the format doesn't contain % args or %%, we know the size. */
12434 if (strchr (fmt_str, target_percent) == 0)
12436 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12437 len = build_int_cstu (size_type_node, strlen (fmt_str));
12439 /* If the format is "%s" and first ... argument is a string literal,
12440 we know the size too. */
12441 else if (fcode == BUILT_IN_SPRINTF_CHK
12442 && strcmp (fmt_str, target_percent_s) == 0)
12448 arg = CALL_EXPR_ARG (exp, 4);
12449 if (validate_arg (arg, POINTER_TYPE))
12451 len = c_strlen (arg, 1);
12452 if (! len || ! host_integerp (len, 1))
12459 if (! integer_all_onesp (size))
12461 if (! len || ! tree_int_cst_lt (len, size))
12465 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12466 or if format doesn't contain % chars or is "%s". */
12467 if (! integer_zerop (flag))
12469 if (fmt_str == NULL)
12471 if (strchr (fmt_str, target_percent) != NULL
12472 && strcmp (fmt_str, target_percent_s))
12476 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12477 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12478 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12482 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12485 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12486 a normal call should be emitted rather than expanding the function
12487 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12488 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12489 passed as second argument. */
12492 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12493 enum built_in_function fcode)
12495 tree dest, size, len, fn, fmt, flag;
12496 const char *fmt_str;
12498 /* Verify the required arguments in the original call. */
12499 if (call_expr_nargs (exp) < 5)
12501 dest = CALL_EXPR_ARG (exp, 0);
12502 if (!validate_arg (dest, POINTER_TYPE))
12504 len = CALL_EXPR_ARG (exp, 1);
12505 if (!validate_arg (len, INTEGER_TYPE))
12507 flag = CALL_EXPR_ARG (exp, 2);
12508 if (!validate_arg (flag, INTEGER_TYPE))
12510 size = CALL_EXPR_ARG (exp, 3);
12511 if (!validate_arg (size, INTEGER_TYPE))
12513 fmt = CALL_EXPR_ARG (exp, 4);
12514 if (!validate_arg (fmt, POINTER_TYPE))
12517 if (! host_integerp (size, 1))
12520 if (! integer_all_onesp (size))
12522 if (! host_integerp (len, 1))
12524 /* If LEN is not constant, try MAXLEN too.
12525 For MAXLEN only allow optimizing into non-_ocs function
12526 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12527 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12533 if (tree_int_cst_lt (size, maxlen))
12537 if (!init_target_chars ())
12540 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12541 or if format doesn't contain % chars or is "%s". */
12542 if (! integer_zerop (flag))
12544 fmt_str = c_getstr (fmt);
12545 if (fmt_str == NULL)
12547 if (strchr (fmt_str, target_percent) != NULL
12548 && strcmp (fmt_str, target_percent_s))
12552 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12554 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12555 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12559 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12562 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12563 FMT and ARG are the arguments to the call; we don't fold cases with
12564 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12566 Return NULL_TREE if no simplification was possible, otherwise return the
12567 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12568 code of the function to be simplified. */
12571 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12572 enum built_in_function fcode)
12574 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12575 const char *fmt_str = NULL;
12577 /* If the return value is used, don't do the transformation. */
12581 /* Verify the required arguments in the original call. */
12582 if (!validate_arg (fmt, POINTER_TYPE))
12585 /* Check whether the format is a literal string constant. */
12586 fmt_str = c_getstr (fmt);
12587 if (fmt_str == NULL)
12590 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12592 /* If we're using an unlocked function, assume the other
12593 unlocked functions exist explicitly. */
12594 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12595 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12599 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12600 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12603 if (!init_target_chars ())
12606 if (strcmp (fmt_str, target_percent_s) == 0
12607 || strchr (fmt_str, target_percent) == NULL)
12611 if (strcmp (fmt_str, target_percent_s) == 0)
12613 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12616 if (!arg || !validate_arg (arg, POINTER_TYPE))
12619 str = c_getstr (arg);
12625 /* The format specifier doesn't contain any '%' characters. */
12626 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12632 /* If the string was "", printf does nothing. */
12633 if (str[0] == '\0')
12634 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12636 /* If the string has length of 1, call putchar. */
12637 if (str[1] == '\0')
12639 /* Given printf("c"), (where c is any one character,)
12640 convert "c"[0] to an int and pass that to the replacement
12642 newarg = build_int_cst (NULL_TREE, str[0]);
12644 call = build_call_expr (fn_putchar, 1, newarg);
12648 /* If the string was "string\n", call puts("string"). */
12649 size_t len = strlen (str);
12650 if ((unsigned char)str[len - 1] == target_newline)
12652 /* Create a NUL-terminated string that's one char shorter
12653 than the original, stripping off the trailing '\n'. */
12654 char *newstr = XALLOCAVEC (char, len);
12655 memcpy (newstr, str, len - 1);
12656 newstr[len - 1] = 0;
12658 newarg = build_string_literal (len, newstr);
12660 call = build_call_expr (fn_puts, 1, newarg);
12663 /* We'd like to arrange to call fputs(string,stdout) here,
12664 but we need stdout and don't have a way to get it yet. */
12669 /* The other optimizations can be done only on the non-va_list variants. */
12670 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12673 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12674 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12676 if (!arg || !validate_arg (arg, POINTER_TYPE))
12679 call = build_call_expr (fn_puts, 1, arg);
12682 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12683 else if (strcmp (fmt_str, target_percent_c) == 0)
12685 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12688 call = build_call_expr (fn_putchar, 1, arg);
12694 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12697 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12698 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12699 more than 3 arguments, and ARG may be null in the 2-argument case.
12701 Return NULL_TREE if no simplification was possible, otherwise return the
12702 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12703 code of the function to be simplified. */
12706 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12707 enum built_in_function fcode)
12709 tree fn_fputc, fn_fputs, call = NULL_TREE;
12710 const char *fmt_str = NULL;
12712 /* If the return value is used, don't do the transformation. */
12716 /* Verify the required arguments in the original call. */
12717 if (!validate_arg (fp, POINTER_TYPE))
12719 if (!validate_arg (fmt, POINTER_TYPE))
12722 /* Check whether the format is a literal string constant. */
12723 fmt_str = c_getstr (fmt);
12724 if (fmt_str == NULL)
12727 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12729 /* If we're using an unlocked function, assume the other
12730 unlocked functions exist explicitly. */
12731 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12732 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12736 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12737 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12740 if (!init_target_chars ())
12743 /* If the format doesn't contain % args or %%, use strcpy. */
12744 if (strchr (fmt_str, target_percent) == NULL)
12746 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12750 /* If the format specifier was "", fprintf does nothing. */
12751 if (fmt_str[0] == '\0')
12753 /* If FP has side-effects, just wait until gimplification is
12755 if (TREE_SIDE_EFFECTS (fp))
12758 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12761 /* When "string" doesn't contain %, replace all cases of
12762 fprintf (fp, string) with fputs (string, fp). The fputs
12763 builtin will take care of special cases like length == 1. */
12765 call = build_call_expr (fn_fputs, 2, fmt, fp);
12768 /* The other optimizations can be done only on the non-va_list variants. */
12769 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12772 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12773 else if (strcmp (fmt_str, target_percent_s) == 0)
12775 if (!arg || !validate_arg (arg, POINTER_TYPE))
12778 call = build_call_expr (fn_fputs, 2, arg, fp);
12781 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12782 else if (strcmp (fmt_str, target_percent_c) == 0)
12784 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12787 call = build_call_expr (fn_fputc, 2, arg, fp);
12792 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12795 /* Initialize format string characters in the target charset. */
12798 init_target_chars (void)
12803 target_newline = lang_hooks.to_target_charset ('\n');
12804 target_percent = lang_hooks.to_target_charset ('%');
12805 target_c = lang_hooks.to_target_charset ('c');
12806 target_s = lang_hooks.to_target_charset ('s');
12807 if (target_newline == 0 || target_percent == 0 || target_c == 0
12811 target_percent_c[0] = target_percent;
12812 target_percent_c[1] = target_c;
12813 target_percent_c[2] = '\0';
12815 target_percent_s[0] = target_percent;
12816 target_percent_s[1] = target_s;
12817 target_percent_s[2] = '\0';
12819 target_percent_s_newline[0] = target_percent;
12820 target_percent_s_newline[1] = target_s;
12821 target_percent_s_newline[2] = target_newline;
12822 target_percent_s_newline[3] = '\0';
12829 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12830 and no overflow/underflow occurred. INEXACT is true if M was not
12831 exactly calculated. TYPE is the tree type for the result. This
12832 function assumes that you cleared the MPFR flags and then
12833 calculated M to see if anything subsequently set a flag prior to
12834 entering this function. Return NULL_TREE if any checks fail. */
12837 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12839 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12840 overflow/underflow occurred. If -frounding-math, proceed iff the
12841 result of calling FUNC was exact. */
12842 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12843 && (!flag_rounding_math || !inexact))
12845 REAL_VALUE_TYPE rr;
12847 real_from_mpfr (&rr, m, type, GMP_RNDN);
12848 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12849 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12850 but the mpft_t is not, then we underflowed in the
12852 if (real_isfinite (&rr)
12853 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12855 REAL_VALUE_TYPE rmode;
12857 real_convert (&rmode, TYPE_MODE (type), &rr);
12858 /* Proceed iff the specified mode can hold the value. */
12859 if (real_identical (&rmode, &rr))
12860 return build_real (type, rmode);
12866 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12867 FUNC on it and return the resulting value as a tree with type TYPE.
12868 If MIN and/or MAX are not NULL, then the supplied ARG must be
12869 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12870 acceptable values, otherwise they are not. The mpfr precision is
12871 set to the precision of TYPE. We assume that function FUNC returns
12872 zero if the result could be calculated exactly within the requested
12876 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12877 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12880 tree result = NULL_TREE;
12884 /* To proceed, MPFR must exactly represent the target floating point
12885 format, which only happens when the target base equals two. */
12886 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12887 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12889 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12891 if (real_isfinite (ra)
12892 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12893 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12895 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12896 const int prec = fmt->p;
12897 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12901 mpfr_init2 (m, prec);
12902 mpfr_from_real (m, ra, GMP_RNDN);
12903 mpfr_clear_flags ();
12904 inexact = func (m, m, rnd);
12905 result = do_mpfr_ckconv (m, type, inexact);
12913 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12914 FUNC on it and return the resulting value as a tree with type TYPE.
12915 The mpfr precision is set to the precision of TYPE. We assume that
12916 function FUNC returns zero if the result could be calculated
12917 exactly within the requested precision. */
12920 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12921 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12923 tree result = NULL_TREE;
12928 /* To proceed, MPFR must exactly represent the target floating point
12929 format, which only happens when the target base equals two. */
12930 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12931 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12932 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12934 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12935 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12937 if (real_isfinite (ra1) && real_isfinite (ra2))
12939 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12940 const int prec = fmt->p;
12941 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12945 mpfr_inits2 (prec, m1, m2, NULL);
12946 mpfr_from_real (m1, ra1, GMP_RNDN);
12947 mpfr_from_real (m2, ra2, GMP_RNDN);
12948 mpfr_clear_flags ();
12949 inexact = func (m1, m1, m2, rnd);
12950 result = do_mpfr_ckconv (m1, type, inexact);
12951 mpfr_clears (m1, m2, NULL);
12958 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12959 FUNC on it and return the resulting value as a tree with type TYPE.
12960 The mpfr precision is set to the precision of TYPE. We assume that
12961 function FUNC returns zero if the result could be calculated
12962 exactly within the requested precision. */
12965 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12966 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12968 tree result = NULL_TREE;
12974 /* To proceed, MPFR must exactly represent the target floating point
12975 format, which only happens when the target base equals two. */
12976 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12977 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12978 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12979 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12981 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12982 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12983 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12985 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12987 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12988 const int prec = fmt->p;
12989 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12993 mpfr_inits2 (prec, m1, m2, m3, NULL);
12994 mpfr_from_real (m1, ra1, GMP_RNDN);
12995 mpfr_from_real (m2, ra2, GMP_RNDN);
12996 mpfr_from_real (m3, ra3, GMP_RNDN);
12997 mpfr_clear_flags ();
12998 inexact = func (m1, m1, m2, m3, rnd);
12999 result = do_mpfr_ckconv (m1, type, inexact);
13000 mpfr_clears (m1, m2, m3, NULL);
13007 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13008 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13009 If ARG_SINP and ARG_COSP are NULL then the result is returned
13010 as a complex value.
13011 The type is taken from the type of ARG and is used for setting the
13012 precision of the calculation and results. */
13015 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13017 tree const type = TREE_TYPE (arg);
13018 tree result = NULL_TREE;
13022 /* To proceed, MPFR must exactly represent the target floating point
13023 format, which only happens when the target base equals two. */
13024 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13025 && TREE_CODE (arg) == REAL_CST
13026 && !TREE_OVERFLOW (arg))
13028 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13030 if (real_isfinite (ra))
13032 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13033 const int prec = fmt->p;
13034 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13035 tree result_s, result_c;
13039 mpfr_inits2 (prec, m, ms, mc, NULL);
13040 mpfr_from_real (m, ra, GMP_RNDN);
13041 mpfr_clear_flags ();
13042 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13043 result_s = do_mpfr_ckconv (ms, type, inexact);
13044 result_c = do_mpfr_ckconv (mc, type, inexact);
13045 mpfr_clears (m, ms, mc, NULL);
13046 if (result_s && result_c)
13048 /* If we are to return in a complex value do so. */
13049 if (!arg_sinp && !arg_cosp)
13050 return build_complex (build_complex_type (type),
13051 result_c, result_s);
13053 /* Dereference the sin/cos pointer arguments. */
13054 arg_sinp = build_fold_indirect_ref (arg_sinp);
13055 arg_cosp = build_fold_indirect_ref (arg_cosp);
13056 /* Proceed if valid pointer type were passed in. */
13057 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13058 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13060 /* Set the values. */
13061 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13063 TREE_SIDE_EFFECTS (result_s) = 1;
13064 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13066 TREE_SIDE_EFFECTS (result_c) = 1;
13067 /* Combine the assignments into a compound expr. */
13068 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13069 result_s, result_c));
13077 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13078 two-argument mpfr order N Bessel function FUNC on them and return
13079 the resulting value as a tree with type TYPE. The mpfr precision
13080 is set to the precision of TYPE. We assume that function FUNC
13081 returns zero if the result could be calculated exactly within the
13082 requested precision. */
13084 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13085 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13086 const REAL_VALUE_TYPE *min, bool inclusive)
13088 tree result = NULL_TREE;
13093 /* To proceed, MPFR must exactly represent the target floating point
13094 format, which only happens when the target base equals two. */
13095 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13096 && host_integerp (arg1, 0)
13097 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13099 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13100 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13103 && real_isfinite (ra)
13104 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13106 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13107 const int prec = fmt->p;
13108 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13112 mpfr_init2 (m, prec);
13113 mpfr_from_real (m, ra, GMP_RNDN);
13114 mpfr_clear_flags ();
13115 inexact = func (m, n, m, rnd);
13116 result = do_mpfr_ckconv (m, type, inexact);
13124 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13125 the pointer *(ARG_QUO) and return the result. The type is taken
13126 from the type of ARG0 and is used for setting the precision of the
13127 calculation and results. */
13130 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13132 tree const type = TREE_TYPE (arg0);
13133 tree result = NULL_TREE;
13138 /* To proceed, MPFR must exactly represent the target floating point
13139 format, which only happens when the target base equals two. */
13140 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13141 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13142 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13144 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13145 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13147 if (real_isfinite (ra0) && real_isfinite (ra1))
13149 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13150 const int prec = fmt->p;
13151 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13156 mpfr_inits2 (prec, m0, m1, NULL);
13157 mpfr_from_real (m0, ra0, GMP_RNDN);
13158 mpfr_from_real (m1, ra1, GMP_RNDN);
13159 mpfr_clear_flags ();
13160 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13161 /* Remquo is independent of the rounding mode, so pass
13162 inexact=0 to do_mpfr_ckconv(). */
13163 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13164 mpfr_clears (m0, m1, NULL);
13167 /* MPFR calculates quo in the host's long so it may
13168 return more bits in quo than the target int can hold
13169 if sizeof(host long) > sizeof(target int). This can
13170 happen even for native compilers in LP64 mode. In
13171 these cases, modulo the quo value with the largest
13172 number that the target int can hold while leaving one
13173 bit for the sign. */
13174 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13175 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13177 /* Dereference the quo pointer argument. */
13178 arg_quo = build_fold_indirect_ref (arg_quo);
13179 /* Proceed iff a valid pointer type was passed in. */
13180 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13182 /* Set the value. */
13183 tree result_quo = fold_build2 (MODIFY_EXPR,
13184 TREE_TYPE (arg_quo), arg_quo,
13185 build_int_cst (NULL, integer_quo));
13186 TREE_SIDE_EFFECTS (result_quo) = 1;
13187 /* Combine the quo assignment with the rem. */
13188 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13189 result_quo, result_rem));
13197 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13198 resulting value as a tree with type TYPE. The mpfr precision is
13199 set to the precision of TYPE. We assume that this mpfr function
13200 returns zero if the result could be calculated exactly within the
13201 requested precision. In addition, the integer pointer represented
13202 by ARG_SG will be dereferenced and set to the appropriate signgam
13206 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13208 tree result = NULL_TREE;
13212 /* To proceed, MPFR must exactly represent the target floating point
13213 format, which only happens when the target base equals two. Also
13214 verify ARG is a constant and that ARG_SG is an int pointer. */
13215 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13216 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13217 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13218 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13220 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13222 /* In addition to NaN and Inf, the argument cannot be zero or a
13223 negative integer. */
13224 if (real_isfinite (ra)
13225 && ra->cl != rvc_zero
13226 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13228 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13229 const int prec = fmt->p;
13230 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13235 mpfr_init2 (m, prec);
13236 mpfr_from_real (m, ra, GMP_RNDN);
13237 mpfr_clear_flags ();
13238 inexact = mpfr_lgamma (m, &sg, m, rnd);
13239 result_lg = do_mpfr_ckconv (m, type, inexact);
13245 /* Dereference the arg_sg pointer argument. */
13246 arg_sg = build_fold_indirect_ref (arg_sg);
13247 /* Assign the signgam value into *arg_sg. */
13248 result_sg = fold_build2 (MODIFY_EXPR,
13249 TREE_TYPE (arg_sg), arg_sg,
13250 build_int_cst (NULL, sg));
13251 TREE_SIDE_EFFECTS (result_sg) = 1;
13252 /* Combine the signgam assignment with the lgamma result. */
13253 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13254 result_sg, result_lg));
13263 The functions below provide an alternate interface for folding
13264 builtin function calls presented as GIMPLE_CALL statements rather
13265 than as CALL_EXPRs. The folded result is still expressed as a
13266 tree. There is too much code duplication in the handling of
13267 varargs functions, and a more intrusive re-factoring would permit
13268 better sharing of code between the tree and statement-based
13269 versions of these functions. */
13271 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13272 along with N new arguments specified as the "..." parameters. SKIP
13273 is the number of arguments in STMT to be omitted. This function is used
13274 to do varargs-to-varargs transformations. */
13277 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13279 int oldnargs = gimple_call_num_args (stmt);
13280 int nargs = oldnargs - skip + n;
13281 tree fntype = TREE_TYPE (fndecl);
13282 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13287 buffer = XALLOCAVEC (tree, nargs);
13289 for (i = 0; i < n; i++)
13290 buffer[i] = va_arg (ap, tree);
13292 for (j = skip; j < oldnargs; j++, i++)
13293 buffer[i] = gimple_call_arg (stmt, j);
13295 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
13298 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13299 a normal call should be emitted rather than expanding the function
13300 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13303 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13305 tree dest, size, len, fn, fmt, flag;
13306 const char *fmt_str;
13307 int nargs = gimple_call_num_args (stmt);
13309 /* Verify the required arguments in the original call. */
13312 dest = gimple_call_arg (stmt, 0);
13313 if (!validate_arg (dest, POINTER_TYPE))
13315 flag = gimple_call_arg (stmt, 1);
13316 if (!validate_arg (flag, INTEGER_TYPE))
13318 size = gimple_call_arg (stmt, 2);
13319 if (!validate_arg (size, INTEGER_TYPE))
13321 fmt = gimple_call_arg (stmt, 3);
13322 if (!validate_arg (fmt, POINTER_TYPE))
13325 if (! host_integerp (size, 1))
13330 if (!init_target_chars ())
13333 /* Check whether the format is a literal string constant. */
13334 fmt_str = c_getstr (fmt);
13335 if (fmt_str != NULL)
13337 /* If the format doesn't contain % args or %%, we know the size. */
13338 if (strchr (fmt_str, target_percent) == 0)
13340 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13341 len = build_int_cstu (size_type_node, strlen (fmt_str));
13343 /* If the format is "%s" and first ... argument is a string literal,
13344 we know the size too. */
13345 else if (fcode == BUILT_IN_SPRINTF_CHK
13346 && strcmp (fmt_str, target_percent_s) == 0)
13352 arg = gimple_call_arg (stmt, 4);
13353 if (validate_arg (arg, POINTER_TYPE))
13355 len = c_strlen (arg, 1);
13356 if (! len || ! host_integerp (len, 1))
13363 if (! integer_all_onesp (size))
13365 if (! len || ! tree_int_cst_lt (len, size))
13369 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13370 or if format doesn't contain % chars or is "%s". */
13371 if (! integer_zerop (flag))
13373 if (fmt_str == NULL)
13375 if (strchr (fmt_str, target_percent) != NULL
13376 && strcmp (fmt_str, target_percent_s))
13380 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13381 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13382 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13386 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13389 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13390 a normal call should be emitted rather than expanding the function
13391 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13392 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13393 passed as second argument. */
13396 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13397 enum built_in_function fcode)
13399 tree dest, size, len, fn, fmt, flag;
13400 const char *fmt_str;
13402 /* Verify the required arguments in the original call. */
13403 if (gimple_call_num_args (stmt) < 5)
13405 dest = gimple_call_arg (stmt, 0);
13406 if (!validate_arg (dest, POINTER_TYPE))
13408 len = gimple_call_arg (stmt, 1);
13409 if (!validate_arg (len, INTEGER_TYPE))
13411 flag = gimple_call_arg (stmt, 2);
13412 if (!validate_arg (flag, INTEGER_TYPE))
13414 size = gimple_call_arg (stmt, 3);
13415 if (!validate_arg (size, INTEGER_TYPE))
13417 fmt = gimple_call_arg (stmt, 4);
13418 if (!validate_arg (fmt, POINTER_TYPE))
13421 if (! host_integerp (size, 1))
13424 if (! integer_all_onesp (size))
13426 if (! host_integerp (len, 1))
13428 /* If LEN is not constant, try MAXLEN too.
13429 For MAXLEN only allow optimizing into non-_ocs function
13430 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13431 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13437 if (tree_int_cst_lt (size, maxlen))
13441 if (!init_target_chars ())
13444 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13445 or if format doesn't contain % chars or is "%s". */
13446 if (! integer_zerop (flag))
13448 fmt_str = c_getstr (fmt);
13449 if (fmt_str == NULL)
13451 if (strchr (fmt_str, target_percent) != NULL
13452 && strcmp (fmt_str, target_percent_s))
13456 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13458 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13459 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13463 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13466 /* Builtins with folding operations that operate on "..." arguments
13467 need special handling; we need to store the arguments in a convenient
13468 data structure before attempting any folding. Fortunately there are
13469 only a few builtins that fall into this category. FNDECL is the
13470 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13471 result of the function call is ignored. */
13474 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13476 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13477 tree ret = NULL_TREE;
13481 case BUILT_IN_SPRINTF_CHK:
13482 case BUILT_IN_VSPRINTF_CHK:
13483 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13486 case BUILT_IN_SNPRINTF_CHK:
13487 case BUILT_IN_VSNPRINTF_CHK:
13488 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13495 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13496 TREE_NO_WARNING (ret) = 1;
13502 /* A wrapper function for builtin folding that prevents warnings for
13503 "statement without effect" and the like, caused by removing the
13504 call node earlier than the warning is generated. */
13507 fold_call_stmt (gimple stmt, bool ignore)
13509 tree ret = NULL_TREE;
13510 tree fndecl = gimple_call_fndecl (stmt);
13512 && TREE_CODE (fndecl) == FUNCTION_DECL
13513 && DECL_BUILT_IN (fndecl)
13514 && !gimple_call_va_arg_pack_p (stmt))
13516 int nargs = gimple_call_num_args (stmt);
13518 /* FIXME: Don't use a list in this interface. */
13519 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13521 tree arglist = NULL_TREE;
13523 for (i = nargs - 1; i >= 0; i--)
13524 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13525 return targetm.fold_builtin (fndecl, arglist, ignore);
13529 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13531 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13533 for (i = 0; i < nargs; i++)
13534 args[i] = gimple_call_arg (stmt, i);
13535 ret = fold_builtin_n (fndecl, args, nargs, ignore);
13538 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13541 /* Propagate location information from original call to
13542 expansion of builtin. Otherwise things like
13543 maybe_emit_chk_warning, that operate on the expansion
13544 of a builtin, will use the wrong location information. */
13545 if (gimple_has_location (stmt))
13547 tree realret = ret;
13548 if (TREE_CODE (ret) == NOP_EXPR)
13549 realret = TREE_OPERAND (ret, 0);
13550 if (CAN_HAVE_LOCATION_P (realret)
13551 && !EXPR_HAS_LOCATION (realret))
13552 SET_EXPR_LOCATION (realret, gimple_location (stmt));