1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
/* Fallback definitions for target macros the target headers may not
   provide.  NOTE(review): this listing is a gapped extraction — the
   closing #endif line of each #ifndef block below is missing here;
   confirm against the complete file.  */
54 #ifndef SLOW_UNALIGNED_ACCESS
/* Default: unaligned access is "slow" exactly when the target requires
   strict alignment.  */
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
58 #ifndef PAD_VARARGS_DOWN
/* Default padding direction for varargs follows target endianness.  */
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
/* Forward declaration of the MPC-based complex-argument folder, needed
   before the main prototype list.  */
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
/* Stringize each builtin's enum identifier; expanding builtins.def under
   this definition fills built_in_names with one string per builtin.
   NOTE(review): the array's opening brace, closing "};" and the matching
   "#undef DEF_BUILTIN" are missing from this extraction.  */
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
68 const char * built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
/* Forward declarations for the file-local helpers defined below.
   Grouped roughly as: string/constant utilities, RTL expanders for
   individual builtins, tree-level folders, and object-size/_chk
   machinery.  */
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
/* NOTE(review): the #endif matching this conditional is missing from
   this extraction.  */
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_args_info (tree);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
/* Tree-level folders: simplify calls to builtins at GIMPLE/GENERIC
   level, before any RTL is generated.  */
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
/* NOTE(review): the continuation line of this prototype (original line
   175) is missing from this extraction.  */
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
/* Arity-dispatched folding entry points.  */
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
/* __builtin_object_size and the _chk (buffer-overflow-checking)
   builtin machinery.  */
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
/* Cached target character values, filled in by init_target_chars.  */
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
/* MPFR-based constant folders for math builtins with constant args.  */
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
/* Return true if NAME starts with __builtin_ or __sync_.

   NAME must be a NUL-terminated string.  The extraction of this file had
   dropped the return type, braces, and return statements; restored here.  */

static bool
is_builtin_name (const char *name)
{
  /* strncmp lets us test the prefix without scanning the whole name.  */
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  return false;
}
241 /* Return true if DECL is a function symbol representing a built-in. */
244 is_builtin_fn (tree decl)
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
255 called_as_built_in (tree node)
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
260 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
261 return is_builtin_name (name);
264 /* Return the alignment in bits of EXP, an object.
265 Don't return more than MAX_ALIGN no matter what, ALIGN is the initial
266 guessed alignment e.g. from type alignment. */
/* NOTE(review): gapped extraction — the return-type line, braces, the
   declaration of the local `inner', the OFFSET/next_offset locals, and
   the loop structure walking OFFSET are missing here; restore from the
   complete file before compiling.  */
269 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
274 if (handled_component_p (exp))
276 HOST_WIDE_INT bitsize, bitpos;
278 enum machine_mode mode;
279 int unsignedp, volatilep;
/* Peel component refs down to the base object, collecting the constant
   bit position and any variable offset.  */
281 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
282 &mode, &unsignedp, &volatilep, true);
/* (x & -x) isolates the lowest set bit: the largest power-of-two
   alignment the bit position guarantees.  */
284 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
289 if (TREE_CODE (offset) == PLUS_EXPR)
291 next_offset = TREE_OPERAND (offset, 0);
292 offset = TREE_OPERAND (offset, 1);
296 if (host_integerp (offset, 1))
298 /* Any overflow in calculating offset_bits won't change
301 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
304 inner = MIN (inner, (offset_bits & -offset_bits));
306 else if (TREE_CODE (offset) == MULT_EXPR
307 && host_integerp (TREE_OPERAND (offset, 1), 1))
309 /* Any overflow in calculating offset_factor won't change
311 unsigned offset_factor
312 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
316 inner = MIN (inner, (offset_factor & -offset_factor));
/* A variable offset of unknown form only guarantees byte alignment.  */
320 inner = MIN (inner, BITS_PER_UNIT);
323 offset = next_offset;
/* Declared objects carry their own alignment.  */
327 align = MIN (inner, DECL_ALIGN (exp));
328 #ifdef CONSTANT_ALIGNMENT
329 else if (CONSTANT_CLASS_P (exp))
330 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
332 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
333 || TREE_CODE (exp) == INDIRECT_REF)
334 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
336 align = MIN (align, inner);
337 return MIN (align, max_align);
340 /* Returns true iff we can trust that alignment information has been
341 calculated properly. */
344 can_trust_pointer_alignment (void)
346 /* We rely on TER to compute accurate alignment information. */
347 return (optimize && flag_tree_ter);
350 /* Return the alignment in bits of EXP, a pointer valued expression.
351 But don't return more than MAX_ALIGN no matter what.
352 The alignment returned is, by default, the alignment of the thing that
353 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
355 Otherwise, look at the expression to see if we can do better, i.e., if the
356 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): gapped extraction — the return-type line, braces, the
   early `return 0;' lines, the switch case labels before line 377
   (presumably the conversion codes) and ADDR_EXPR, break/default
   statements, and the final return are missing here.  */
359 get_pointer_alignment (tree exp, unsigned int max_align)
361 unsigned int align, inner;
/* Without TER we cannot rely on any computed alignment.  */
363 if (!can_trust_pointer_alignment ())
366 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
/* Start from the alignment of the pointed-to type.  */
369 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
370 align = MIN (align, max_align);
374 switch (TREE_CODE (exp))
377 exp = TREE_OPERAND (exp, 0);
378 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
/* A conversion can only tighten, never loosen, alignment.  */
381 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
382 align = MIN (inner, max_align);
385 case POINTER_PLUS_EXPR:
386 /* If sum of pointer + int, restrict our maximum alignment to that
387 imposed by the integer. If not, we can't do any better than
389 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
/* Halve MAX_ALIGN until the constant addend is a multiple of it.  */
392 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
393 & (max_align / BITS_PER_UNIT - 1))
397 exp = TREE_OPERAND (exp, 0);
401 /* See what we are pointing at and look at its alignment. */
402 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
410 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
411 way, because it could contain a zero byte in the middle.
412 TREE_STRING_LENGTH is the size of the character array, not the string.
414 ONLY_VALUE should be nonzero if the result is not going to be emitted
415 into the instruction stream and zero if it is going to be expanded.
416 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
417 is returned, otherwise NULL, since
418 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
419 evaluate the side-effects.
421 The value returned is of type `ssizetype'.
423 Unfortunately, string_constant can't access the values of const char
424 arrays with initializers, so neither can we do so here. */
/* NOTE(review): gapped extraction — the return-type line, braces,
   several local declarations (offset_node, max, ptr, i, len1/len2) and
   a number of return statements are missing here.  */
427 c_strlen (tree src, int only_value)
430 HOST_WIDE_INT offset;
/* For a COND_EXPR whose condition has no side effects (or when only the
   value is needed), the length is known only if both arms agree.  */
435 if (TREE_CODE (src) == COND_EXPR
436 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
440 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
441 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
442 if (tree_int_cst_equal (len1, len2))
/* For a COMPOUND_EXPR, only the second operand contributes the value.  */
446 if (TREE_CODE (src) == COMPOUND_EXPR
447 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
448 return c_strlen (TREE_OPERAND (src, 1), only_value);
450 src = string_constant (src, &offset_node);
/* MAX is the last array index, i.e. the size minus the trailing NUL.  */
454 max = TREE_STRING_LENGTH (src) - 1;
455 ptr = TREE_STRING_POINTER (src);
457 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
459 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
460 compute the offset to the following null if we don't know where to
461 start searching for it. */
464 for (i = 0; i < max; i++)
468 /* We don't know the starting offset, but we do know that the string
469 has no internal zero bytes. We can assume that the offset falls
470 within the bounds of the string; otherwise, the programmer deserves
471 what he gets. Subtract the offset from the length of the string,
472 and return that. This would perhaps not be valid if we were dealing
473 with named arrays in addition to literal string constants. */
475 return size_diffop_loc (input_location, size_int (max), offset_node);
478 /* We have a known offset into the string. Start searching there for
479 a null character if we can represent it as a single HOST_WIDE_INT. */
480 if (offset_node == 0)
482 else if (! host_integerp (offset_node, 0))
485 offset = tree_low_cst (offset_node, 0);
487 /* If the offset is known to be out of bounds, warn, and call strlen at
489 if (offset < 0 || offset > max)
491 /* Suppress multiple warnings for propagated constant strings. */
492 if (! TREE_NO_WARNING (src))
494 warning (0, "offset outside bounds of constant string");
495 TREE_NO_WARNING (src) = 1;
500 /* Use strlen to search for the first zero byte. Since any strings
501 constructed with build_string will have nulls appended, we win even
502 if we get handed something like (char[4])"abcd".
504 Since OFFSET is our starting index into the string, no further
505 calculation is needed. */
506 return ssize_int (strlen (ptr + offset));
509 /* Return a char pointer for a C string if it is a string constant
510 or sum of string constant and integer constant. */
517 src = string_constant (src, &offset_node);
521 if (offset_node == 0)
522 return TREE_STRING_POINTER (src);
523 else if (!host_integerp (offset_node, 1)
524 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
527 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
530 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
531 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
/* NOTE(review): gapped extraction — the return-type line, braces, the
   declarations of the locals (presumably c[2], ch, i, j), their
   zero-initialization, the initial assignment to j inside the loop, and
   the scaling of j to a bit index are missing here.  */
534 c_readstr (const char *str, enum machine_mode mode)
540 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
545 for (i = 0; i < GET_MODE_SIZE (mode); i++)
/* Place byte i at the word/byte position the target's endianness
   dictates.  */
548 if (WORDS_BIG_ENDIAN)
549 j = GET_MODE_SIZE (mode) - i - 1;
550 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
551 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
552 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
554 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
/* Read the byte as unsigned so sign extension cannot corrupt C[].  */
557 ch = (unsigned char) str[i];
558 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
560 return immed_double_const (c[0], c[1], mode);
563 /* Cast a target constant CST to target CHAR and if that value fits into
564 host char type, return zero and put that value into variable pointed to by
568 target_char_cast (tree cst, char *p)
570 unsigned HOST_WIDE_INT val, hostval;
572 if (!host_integerp (cst, 1)
573 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
576 val = tree_low_cst (cst, 1);
577 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
578 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
581 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
582 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
591 /* Similar to save_expr, but assumes that arbitrary code is not executed
592 in between the multiple evaluations. In particular, we assume that a
593 non-addressable local variable will not be modified. */
596 builtin_save_expr (tree exp)
598 if (TREE_ADDRESSABLE (exp) == 0
599 && (TREE_CODE (exp) == PARM_DECL
600 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
603 return save_expr (exp);
606 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
607 times to get the address of either a higher stack frame, or a return
608 address located within it (depending on FNDECL_CODE). */
/* NOTE(review): gapped extraction — the return-type line, braces, the
   #else/#endif lines of the preprocessor conditionals, the loop
   variable declaration, and the final `return tem;' are missing.  */
611 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
615 #ifdef INITIAL_FRAME_ADDRESS_RTX
616 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
620 /* For a zero count with __builtin_return_address, we don't care what
621 frame address we return, because target-specific definitions will
622 override us. Therefore frame pointer elimination is OK, and using
623 the soft frame pointer is OK.
625 For a nonzero count, or a zero count with __builtin_frame_address,
626 we require a stable offset from the current frame pointer to the
627 previous one, so we must use the hard frame pointer, and
628 we must disable frame pointer elimination. */
629 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
630 tem = frame_pointer_rtx;
633 tem = hard_frame_pointer_rtx;
635 /* Tell reload not to eliminate the frame pointer. */
636 crtl->accesses_prior_frames = 1;
640 /* Some machines need special handling before we can access
641 arbitrary frames. For example, on the SPARC, we must first flush
642 all register windows to the stack. */
643 #ifdef SETUP_FRAME_ADDRESSES
645 SETUP_FRAME_ADDRESSES ();
648 /* On the SPARC, the return address is not in the frame, it is in a
649 register. There is no way to access it off of the current frame
650 pointer, but it can be accessed off the previous frame pointer by
651 reading the value from the register window save area. */
652 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
653 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
657 /* Scan back COUNT frames to the specified frame. */
658 for (i = 0; i < count; i++)
660 /* Assume the dynamic chain pointer is in the word that the
661 frame address points to, unless otherwise specified. */
662 #ifdef DYNAMIC_CHAIN_ADDRESS
663 tem = DYNAMIC_CHAIN_ADDRESS (tem);
/* Dereference one link of the dynamic chain.  */
665 tem = memory_address (Pmode, tem);
666 tem = gen_frame_mem (Pmode, tem);
667 tem = copy_to_reg (tem);
670 /* For __builtin_frame_address, return what we've got. But, on
671 the SPARC for example, we may have to add a bias. */
672 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
673 #ifdef FRAME_ADDR_RTX
674 return FRAME_ADDR_RTX (tem);
679 /* For __builtin_return_address, get the return address from that frame. */
680 #ifdef RETURN_ADDR_RTX
681 tem = RETURN_ADDR_RTX (count, tem);
683 tem = memory_address (Pmode,
684 plus_constant (tem, GET_MODE_SIZE (Pmode)));
685 tem = gen_frame_mem (Pmode, tem);
690 /* Alias set used for setjmp buffer. */
/* Lazily initialized; -1 means "not yet allocated".  */
691 static alias_set_type setjmp_alias_set = -1;
693 /* Construct the leading half of a __builtin_setjmp call. Control will
694 return to RECEIVER_LABEL. This is also called directly by the SJLJ
695 exception handling code. */
/* NOTE(review): gapped extraction — the return-type line, braces, and
   the declarations of `mem' and `stack_save' are missing here.  */
698 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
700 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
704 if (setjmp_alias_set == -1)
705 setjmp_alias_set = new_alias_set ();
707 buf_addr = convert_memory_address (Pmode, buf_addr);
709 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
711 /* We store the frame pointer and the address of receiver_label in
712 the buffer and use the rest of it for the stack save area, which
713 is machine-dependent. */
/* Slot 0: the frame pointer value to restore on longjmp.  */
715 mem = gen_rtx_MEM (Pmode, buf_addr);
716 set_mem_alias_set (mem, setjmp_alias_set);
717 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
/* Slot 1: the receiver label.  NOTE(review): the trailing comma below
   joins this statement with the next via the comma operator — this
   oddity is present in the upstream source and is harmless, but a
   semicolon would be clearer.  */
719 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
720 set_mem_alias_set (mem, setjmp_alias_set);
722 emit_move_insn (validize_mem (mem),
723 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
/* Slot 2 onward: the machine-dependent stack save area.  */
725 stack_save = gen_rtx_MEM (sa_mode,
726 plus_constant (buf_addr,
727 2 * GET_MODE_SIZE (Pmode)));
728 set_mem_alias_set (stack_save, setjmp_alias_set);
729 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
731 /* If there is further processing to do, do it. */
732 #ifdef HAVE_builtin_setjmp_setup
733 if (HAVE_builtin_setjmp_setup)
734 emit_insn (gen_builtin_setjmp_setup (buf_addr));
737 /* Tell optimize_save_area_alloca that extra work is going to
738 need to go on during alloca. */
739 cfun->calls_setjmp = 1;
741 /* We have a nonlocal label. */
742 cfun->has_nonlocal_label = 1;
745 /* Construct the trailing part of a __builtin_setjmp call. This is
746 also called directly by the SJLJ exception handling code. */
/* NOTE(review): gapped extraction — the return-type line, braces,
   several #else/#endif lines, and the declarations of `chain' and the
   loop index `i' are missing here.  */
749 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
753 /* Clobber the FP when we get here, so we have to make sure it's
754 marked as used by this function. */
755 emit_use (hard_frame_pointer_rtx)
757 /* Mark the static chain as clobbered here so life information
758 doesn't get messed up for it. */
759 chain = targetm.calls.static_chain (current_function_decl, true);
760 if (chain && REG_P (chain))
761 emit_clobber (chain);
763 /* Now put in the code to restore the frame pointer, and argument
764 pointer, if needed. */
765 #ifdef HAVE_nonlocal_goto
766 if (! HAVE_nonlocal_goto)
/* The virtual frame pointer must track the restored hard FP.  */
769 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
770 /* This might change the hard frame pointer in ways that aren't
771 apparent to early optimization passes, so force a clobber. */
772 emit_clobber (hard_frame_pointer_rtx);
775 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
776 if (fixed_regs[ARG_POINTER_REGNUM])
778 #ifdef ELIMINABLE_REGS
/* If the arg pointer can be eliminated to the hard FP, no explicit
   restore is needed.  */
780 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
782 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
783 if (elim_regs[i].from == ARG_POINTER_REGNUM
784 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
787 if (i == ARRAY_SIZE (elim_regs))
790 /* Now restore our arg pointer from the address at which it
791 was saved in our stack frame. */
792 emit_move_insn (crtl->args.internal_arg_pointer,
793 copy_to_reg (get_arg_pointer_save_area ()));
798 #ifdef HAVE_builtin_setjmp_receiver
799 if (HAVE_builtin_setjmp_receiver)
800 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
803 #ifdef HAVE_nonlocal_goto_receiver
804 if (HAVE_nonlocal_goto_receiver)
805 emit_insn (gen_nonlocal_goto_receiver ());
810 /* We must not allow the code we just generated to be reordered by
811 scheduling. Specifically, the update of the frame pointer must
812 happen immediately, not later. */
813 emit_insn (gen_blockage ());
816 /* __builtin_longjmp is passed a pointer to an array of five words (not
817 all will be used on all machines). It operates similarly to the C
818 library function of the same name, but is more efficient. Much of
819 the code below is copied from the handling of non-local gotos. */
/* NOTE(review): gapped extraction — the return-type line, braces,
   #else/#endif lines, and the JUMP_P test inside the final scan loop
   are missing here.  */
822 expand_builtin_longjmp (rtx buf_addr, rtx value)
824 rtx fp, lab, stack, insn, last;
825 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
827 /* DRAP is needed for stack realign if longjmp is expanded to current
829 if (SUPPORTS_STACK_ALIGNMENT)
830 crtl->need_drap = true;
832 if (setjmp_alias_set == -1)
833 setjmp_alias_set = new_alias_set ();
835 buf_addr = convert_memory_address (Pmode, buf_addr);
837 buf_addr = force_reg (Pmode, buf_addr);
839 /* We require that the user must pass a second argument of 1, because
840 that is what builtin_setjmp will return. */
841 gcc_assert (value == const1_rtx);
843 last = get_last_insn ();
844 #ifdef HAVE_builtin_longjmp
845 if (HAVE_builtin_longjmp)
846 emit_insn (gen_builtin_longjmp (buf_addr));
/* Buffer layout mirrors expand_builtin_setjmp_setup: FP, label,
   stack save area.  */
850 fp = gen_rtx_MEM (Pmode, buf_addr);
851 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
852 GET_MODE_SIZE (Pmode)));
854 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
855 2 * GET_MODE_SIZE (Pmode)));
856 set_mem_alias_set (fp, setjmp_alias_set);
857 set_mem_alias_set (lab, setjmp_alias_set);
858 set_mem_alias_set (stack, setjmp_alias_set);
860 /* Pick up FP, label, and SP from the block and jump. This code is
861 from expand_goto in stmt.c; see there for detailed comments. */
862 #ifdef HAVE_nonlocal_goto
863 if (HAVE_nonlocal_goto)
864 /* We have to pass a value to the nonlocal_goto pattern that will
865 get copied into the static_chain pointer, but it does not matter
866 what that value is, because builtin_setjmp does not use it. */
867 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
/* Generic fallback: restore FP and SP by hand, then jump.  */
871 lab = copy_to_reg (lab);
873 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
874 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
876 emit_move_insn (hard_frame_pointer_rtx, fp);
877 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
879 emit_use (hard_frame_pointer_rtx);
880 emit_use (stack_pointer_rtx);
881 emit_indirect_jump (lab);
885 /* Search backwards and mark the jump insn as a non-local goto.
886 Note that this precludes the use of __builtin_longjmp to a
887 __builtin_setjmp target in the same function. However, we've
888 already cautioned the user that these functions are for
889 internal exception handling use only. */
890 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
/* The jump must appear after LAST, the insn preceding our expansion.  */
892 gcc_assert (insn != last);
896 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
899 else if (CALL_P (insn))
904 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
905 and the address of the save area. */
/* NOTE(review): gapped extraction — the return-type line, braces,
   #else/#endif lines, the early `return NULL_RTX;' after the arglist
   check, the JUMP_P test in the scan loop, and the final return are
   missing here.  */
908 expand_builtin_nonlocal_goto (tree exp)
910 tree t_label, t_save_area;
911 rtx r_label, r_save_area, r_fp, r_sp, insn;
913 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
916 t_label = CALL_EXPR_ARG (exp, 0);
917 t_save_area = CALL_EXPR_ARG (exp, 1);
919 r_label = expand_normal (t_label);
920 r_label = convert_memory_address (Pmode, r_label);
921 r_save_area = expand_normal (t_save_area);
922 r_save_area = convert_memory_address (Pmode, r_save_area);
923 /* Copy the address of the save location to a register just in case it was based
924 on the frame pointer. */
925 r_save_area = copy_to_reg (r_save_area);
/* Save area layout: frame pointer, then the stack pointer save.  */
926 r_fp = gen_rtx_MEM (Pmode, r_save_area);
927 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
928 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
930 crtl->has_nonlocal_goto = 1;
932 #ifdef HAVE_nonlocal_goto
933 /* ??? We no longer need to pass the static chain value, afaik. */
934 if (HAVE_nonlocal_goto)
935 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
/* Generic fallback when no nonlocal_goto pattern exists.  */
939 r_label = copy_to_reg (r_label);
941 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
942 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
944 /* Restore frame pointer for containing function.
945 This sets the actual hard register used for the frame pointer
946 to the location of the function's incoming static chain info.
947 The non-local goto handler will then adjust it to contain the
948 proper value and reload the argument pointer, if needed. */
949 emit_move_insn (hard_frame_pointer_rtx, r_fp);
950 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
952 /* USE of hard_frame_pointer_rtx added for consistency;
953 not clear if really needed. */
954 emit_use (hard_frame_pointer_rtx);
955 emit_use (stack_pointer_rtx);
957 /* If the architecture is using a GP register, we must
958 conservatively assume that the target function makes use of it.
959 The prologue of functions with nonlocal gotos must therefore
960 initialize the GP register to the appropriate value, and we
961 must then make sure that this value is live at the point
962 of the jump. (Note that this doesn't necessarily apply
963 to targets with a nonlocal_goto pattern; they are free
964 to implement it in their own way. Note also that this is
965 a no-op if the GP register is a global invariant.) */
966 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
967 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
968 emit_use (pic_offset_table_rtx);
970 emit_indirect_jump (r_label);
973 /* Search backwards to the jump insn and mark it as a
975 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
979 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
982 else if (CALL_P (insn))
989 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
990 (not all will be used on all machines) that was passed to __builtin_setjmp.
991 It updates the stack pointer in that block to correspond to the current
992 stack pointer. */
/* NOTE(review): gapped extraction — the return-type line, braces,
   #else/#endif lines, the `rtx stack_save' declaration line, and the
   HAVE_setjmp conditional around the gen_setjmp call are missing
   here.  */
995 expand_builtin_update_setjmp_buf (rtx buf_addr)
997 enum machine_mode sa_mode = Pmode;
/* Prefer the mode the target's save_stack_nonlocal pattern wants;
   otherwise fall back to STACK_SAVEAREA_MODE.  */
1001 #ifdef HAVE_save_stack_nonlocal
1002 if (HAVE_save_stack_nonlocal)
1003 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1005 #ifdef STACK_SAVEAREA_MODE
1006 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* The stack save slot is the third word of the setjmp buffer, matching
   expand_builtin_setjmp_setup's layout.  */
1010 = gen_rtx_MEM (sa_mode,
1013 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1017 emit_insn (gen_setjmp ());
1020 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1023 /* Expand a call to __builtin_prefetch. For a target that does not support
1024 data prefetch, evaluate the memory address argument in case it has side
1028 expand_builtin_prefetch (tree exp)
1030 tree arg0, arg1, arg2;
/* Only the address argument is mandatory; bail out silently if it is
   missing or not a pointer.  */
1034 if (!validate_arglist (exp, POINTER_TYPE, 0))
1037 arg0 = CALL_EXPR_ARG (exp, 0);
1039 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1040 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1042 nargs = call_expr_nargs (exp);
1044 arg1 = CALL_EXPR_ARG (exp, 1);
1046 arg1 = integer_zero_node;
1048 arg2 = CALL_EXPR_ARG (exp, 2);
1050 arg2 = build_int_cst (NULL_TREE, 3);
1052 /* Argument 0 is an address. */
1053 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1055 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1056 if (TREE_CODE (arg1) != INTEGER_CST)
1058 error ("second argument to %<__builtin_prefetch%> must be a constant");
/* After diagnosing, fall back to the default so expansion can proceed.  */
1059 arg1 = integer_zero_node;
1061 op1 = expand_normal (arg1);
1062 /* Argument 1 must be either zero or one. */
1063 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1065 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1070 /* Argument 2 (locality) must be a compile-time constant int. */
1071 if (TREE_CODE (arg2) != INTEGER_CST)
1073 error ("third argument to %<__builtin_prefetch%> must be a constant");
1074 arg2 = integer_zero_node;
1076 op2 = expand_normal (arg2);
1077 /* Argument 2 must be 0, 1, 2, or 3. */
1078 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1080 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1084 #ifdef HAVE_prefetch
/* Force the address into a register if the prefetch pattern's predicate
   rejects it or it is not already in Pmode.  */
1087 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1089 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1090 || (GET_MODE (op0) != Pmode))
1092 op0 = convert_memory_address (Pmode, op0);
1093 op0 = force_reg (Pmode, op0);
1095 emit_insn (gen_prefetch (op0, op1, op2));
1099 /* Don't do anything with direct references to volatile memory, but
1100 generate code to handle other side effects. */
1101 if (!MEM_P (op0) && side_effects_p (op0))
1105 /* Get a MEM rtx for expression EXP which is the address of an operand
1106 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1107 the maximum length of the block of memory that might be accessed or
1111 get_memory_rtx (tree exp, tree len)
1113 tree orig_exp = exp;
1117 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1118 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1119 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1120 exp = TREE_OPERAND (exp, 0);
/* The address itself is expanded from the ORIGINAL expression, so an
   unresolved SAVE_EXPR is still honored for code generation; EXP is
   only stripped for the purpose of deriving MEM attributes below.  */
1122 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1123 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1125 /* Get an expression we can use to find the attributes to assign to MEM.
1126 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1127 we can. First remove any nops. */
1128 while (CONVERT_EXPR_P (exp)
1129 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1130 exp = TREE_OPERAND (exp, 0);
/* &obj + CST with positive constant offset: remember OFF and use the
   underlying object for the attributes.  */
1133 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1134 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1135 && host_integerp (TREE_OPERAND (exp, 1), 0)
1136 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1137 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1138 else if (TREE_CODE (exp) == ADDR_EXPR)
1139 exp = TREE_OPERAND (exp, 0);
1140 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1141 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1145 /* Honor attributes derived from exp, except for the alias set
1146 (as builtin stringops may alias with anything) and the size
1147 (as stringops may access multiple array elements). */
1150 set_mem_attributes (mem, exp, 0);
/* Apply the constant offset recorded above to the MEM's attributes.  */
1153 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1155 /* Allow the string and memory builtins to overflow from one
1156 field into another, see http://gcc.gnu.org/PR23561.
1157 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1158 memory accessed by the string or memory builtin will fit
1159 within the field. */
1160 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1162 tree mem_expr = MEM_EXPR (mem);
/* -1 means "unknown" for both the byte offset and the access length.  */
1163 HOST_WIDE_INT offset = -1, length = -1;
1166 while (TREE_CODE (inner) == ARRAY_REF
1167 || CONVERT_EXPR_P (inner)
1168 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1169 || TREE_CODE (inner) == SAVE_EXPR)
1170 inner = TREE_OPERAND (inner, 0)
1172 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1174 if (MEM_OFFSET (mem)
1175 && CONST_INT_P (MEM_OFFSET (mem)))
1176 offset = INTVAL (MEM_OFFSET (mem));
1178 if (offset >= 0 && len && host_integerp (len, 0))
1179 length = tree_low_cst (len, 0);
/* Walk up the chain of COMPONENT_REFs, checking at each level whether
   the [offset, offset+length) access provably fits in the field.  */
1181 while (TREE_CODE (inner) == COMPONENT_REF)
1183 tree field = TREE_OPERAND (inner, 1);
1184 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1185 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1187 /* Bitfields are generally not byte-addressable. */
1188 gcc_assert (!DECL_BIT_FIELD (field)
1189 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1190 % BITS_PER_UNIT) == 0
1191 && host_integerp (DECL_SIZE (field), 0)
1192 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1193 % BITS_PER_UNIT) == 0));
1195 /* If we can prove that the memory starting at XEXP (mem, 0) and
1196 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1197 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1198 fields without DECL_SIZE_UNIT like flexible array members. */
1200 && DECL_SIZE_UNIT (field)
1201 && host_integerp (DECL_SIZE_UNIT (field), 0))
1204 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1207 && offset + length <= size)
/* Access did not provably fit: translate OFFSET to be relative to
   the enclosing record and retry one level up.  */
1212 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1213 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1214 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1222 mem_expr = TREE_OPERAND (mem_expr, 0);
1223 inner = TREE_OPERAND (inner, 0);
1226 if (mem_expr == NULL)
/* Install the (possibly trimmed) expression and offset on the MEM.  */
1228 if (mem_expr != MEM_EXPR (mem))
1230 set_mem_expr (mem, mem_expr);
1231 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
/* Stringops may alias anything and access multiple elements, so drop
   the alias set and size (see the comment above).  */
1234 set_mem_alias_set (mem, 0);
1235 set_mem_size (mem, NULL_RTX);
1241 /* Built-in functions to perform an untyped call and return. */
1243 /* For each register that may be used for calling a function, this
1244 gives a mode used to copy the register's value. VOIDmode indicates
1245 the register is not used for calling a function. If the machine
1246 has register windows, this gives only the outbound registers.
1247 INCOMING_REGNO gives the corresponding inbound register. */
/* Filled in lazily by apply_args_size, below.  */
1248 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1250 /* For each register that may be used for returning values, this gives
1251 a mode used to copy the register's value. VOIDmode indicates the
1252 register is not used for returning values. If the machine has
1253 register windows, this gives only the outbound registers.
1254 INCOMING_REGNO gives the corresponding inbound register. */
/* Filled in lazily by apply_result_size, below.  */
1255 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1257 /* Return the size required for the block returned by __builtin_apply_args,
1258 and initialize apply_args_mode. */
1261 apply_args_size (void)
/* Computed once and cached; -1 means "not yet computed".  */
1263 static int size = -1;
1266 enum machine_mode mode;
1268 /* The values computed by this function never change. */
1271 /* The first value is the incoming arg-pointer. */
1272 size = GET_MODE_SIZE (Pmode);
1274 /* The second value is the structure value address unless this is
1275 passed as an "invisible" first argument. */
1276 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1277 size += GET_MODE_SIZE (Pmode);
/* Then one properly-aligned slot per argument-passing register,
   recording each register's copy mode in apply_args_mode.  */
1279 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1280 if (FUNCTION_ARG_REGNO_P (regno))
1282 mode = reg_raw_mode[regno];
1284 gcc_assert (mode != VOIDmode);
1286 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1287 if (size % align != 0)
1288 size = CEIL (size, align) * align;
1289 size += GET_MODE_SIZE (mode);
1290 apply_args_mode[regno] = mode;
/* Registers not used for argument passing are marked VOIDmode.  */
1294 apply_args_mode[regno] = VOIDmode;
1300 /* Return the size required for the block returned by __builtin_apply,
1301 and initialize apply_result_mode. */
1304 apply_result_size (void)
/* Computed once and cached; -1 means "not yet computed".  */
1306 static int size = -1;
1308 enum machine_mode mode;
1310 /* The values computed by this function never change. */
/* One properly-aligned slot per value-returning register, recording
   each register's copy mode in apply_result_mode.  */
1315 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1316 if (FUNCTION_VALUE_REGNO_P (regno))
1318 mode = reg_raw_mode[regno];
1320 gcc_assert (mode != VOIDmode);
1322 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1323 if (size % align != 0)
1324 size = CEIL (size, align) * align;
1325 size += GET_MODE_SIZE (mode);
1326 apply_result_mode[regno] = mode;
/* Registers not used for returning values are marked VOIDmode.  */
1329 apply_result_mode[regno] = VOIDmode;
1331 /* Allow targets that use untyped_call and untyped_return to override
1332 the size so that machine-specific information can be stored here. */
1333 #ifdef APPLY_RESULT_SIZE
1334 size = APPLY_RESULT_SIZE;
1340 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1341 /* Create a vector describing the result block RESULT. If SAVEP is true,
1342 the result block is used to save the values; otherwise it is used to
1343 restore the values. */
1346 result_vector (int savep, rtx result)
1348 int regno, size, align, nelts;
1349 enum machine_mode mode;
1351 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
/* Build one SET per value-returning register, using the same layout
   (alignment and running SIZE offset) as apply_result_size.  */
1354 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1355 if ((mode = apply_result_mode[regno]) != VOIDmode)
1357 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1358 if (size % align != 0)
1359 size = CEIL (size, align) * align;
/* When restoring, the value lands in the corresponding inbound
   register (relevant for register-window targets).  */
1360 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1361 mem = adjust_address (result, mode, size);
1362 savevec[nelts++] = (savep
1363 ? gen_rtx_SET (VOIDmode, mem, reg)
1364 : gen_rtx_SET (VOIDmode, reg, mem));
1365 size += GET_MODE_SIZE (mode);
1367 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1369 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1371 /* Save the state required to perform an untyped call with the same
1372 arguments as were passed to the current function. */
1375 expand_builtin_apply_args_1 (void)
1378 int size, align, regno;
1379 enum machine_mode mode;
1380 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1382 /* Create a block where the arg-pointer, structure value address,
1383 and argument registers can be saved. */
1384 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1386 /* Walk past the arg-pointer and structure value address. */
1387 size = GET_MODE_SIZE (Pmode);
1388 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1389 size += GET_MODE_SIZE (Pmode);
1391 /* Save each register used in calling a function to the block. */
/* Layout (alignment, running SIZE offset) must mirror apply_args_size.  */
1392 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1393 if ((mode = apply_args_mode[regno]) != VOIDmode)
1395 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1396 if (size % align != 0)
1397 size = CEIL (size, align) * align;
/* Save the INBOUND register (register-window targets differ).  */
1399 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1401 emit_move_insn (adjust_address (registers, mode, size), tem);
1402 size += GET_MODE_SIZE (mode);
1405 /* Save the arg pointer to the block. */
1406 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1407 #ifdef STACK_GROWS_DOWNWARD
1408 /* We need the pointer as the caller actually passed them to us, not
1409 as we might have pretended they were passed. Make sure it's a valid
1410 operand, as emit_move_insn isn't expected to handle a PLUS. */
1412 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1415 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1417 size = GET_MODE_SIZE (Pmode);
1419 /* Save the structure value address unless this is passed as an
1420 "invisible" first argument. */
1421 if (struct_incoming_value)
1423 emit_move_insn (adjust_address (registers, Pmode, size),
1424 copy_to_reg (struct_incoming_value));
1425 size += GET_MODE_SIZE (Pmode);
1428 /* Return the address of the block. */
1429 return copy_addr_to_reg (XEXP (registers, 0));
1432 /* __builtin_apply_args returns block of memory allocated on
1433 the stack into which is stored the arg pointer, structure
1434 value address, static chain, and all the registers that might
1435 possibly be used in performing a function call. The code is
1436 moved to the start of the function so the incoming values are
1440 expand_builtin_apply_args (void)
1442 /* Don't do __builtin_apply_args more than once in a function.
1443 Save the result of the first call and reuse it. */
1444 if (apply_args_value != 0)
1445 return apply_args_value;
1447 /* When this function is called, it means that registers must be
1448 saved on entry to this function. So we migrate the
1449 call to the first insn of this function. */
/* Emit the register-saving code into its own sequence so it can be
   relocated to the function entry below.  */
1454 temp = expand_builtin_apply_args_1 ();
/* Cache the result for any later __builtin_apply_args in this function.  */
1458 apply_args_value = temp;
1460 /* Put the insns after the NOTE that starts the function.
1461 If this is inside a start_sequence, make the outer-level insn
1462 chain current, so the code is placed at the start of the
1463 function. If internal_arg_pointer is a non-virtual pseudo,
1464 it needs to be placed after the function that initializes
1466 push_topmost_sequence ();
1467 if (REG_P (crtl->args.internal_arg_pointer)
1468 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1469 emit_insn_before (seq, parm_birth_insn)
1471 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1472 pop_topmost_sequence ();
1477 /* Perform an untyped call and save the state required to perform an
1478 untyped return of whatever value was returned by the given function. */
1481 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1483 int size, align, regno;
1484 enum machine_mode mode;
1485 rtx incoming_args, result, reg, dest, src, call_insn;
1486 rtx old_stack_level = 0;
1487 rtx call_fusage = 0;
1488 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1490 arguments = convert_memory_address (Pmode, arguments);
1492 /* Create a block where the return registers can be saved. */
1493 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1495 /* Fetch the arg pointer from the ARGUMENTS block. */
1496 incoming_args = gen_reg_rtx (Pmode);
1497 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1498 #ifndef STACK_GROWS_DOWNWARD
/* On upward-growing stacks the saved arg pointer marks the END of the
   block, so step back over ARGSIZE bytes to reach its start.  */
1499 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1500 incoming_args, 0, OPTAB_LIB_WIDEN);
1503 /* Push a new argument block and copy the arguments. Do not allow
1504 the (potential) memcpy call below to interfere with our stack
1506 do_pending_stack_adjust ();
1509 /* Save the stack with nonlocal if available. */
1510 #ifdef HAVE_save_stack_nonlocal
1511 if (HAVE_save_stack_nonlocal)
1512 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1515 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1517 /* Allocate a block of memory onto the stack and copy the memory
1518 arguments to the outgoing arguments address. */
1519 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1521 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1522 may have already set current_function_calls_alloca to true.
1523 current_function_calls_alloca won't be set if argsize is zero,
1524 so we have to guarantee need_drap is true here. */
1525 if (SUPPORTS_STACK_ALIGNMENT)
1526 crtl->need_drap = true;
1528 dest = virtual_outgoing_args_rtx;
1529 #ifndef STACK_GROWS_DOWNWARD
/* Mirror the adjustment above for the destination block.  */
1530 if (CONST_INT_P (argsize))
1531 dest = plus_constant (dest, -INTVAL (argsize));
1533 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1535 dest = gen_rtx_MEM (BLKmode, dest);
1536 set_mem_align (dest, PARM_BOUNDARY);
1537 src = gen_rtx_MEM (BLKmode, incoming_args);
1538 set_mem_align (src, PARM_BOUNDARY);
/* Copy the caller's saved argument block onto our outgoing args area.  */
1539 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1541 /* Refer to the argument block. */
1543 arguments = gen_rtx_MEM (BLKmode, arguments);
1544 set_mem_align (arguments, PARM_BOUNDARY);
1546 /* Walk past the arg-pointer and structure value address. */
1547 size = GET_MODE_SIZE (Pmode);
1549 size += GET_MODE_SIZE (Pmode);
1551 /* Restore each of the registers previously saved. Make USE insns
1552 for each of these registers for use in making the call. */
/* Layout must mirror apply_args_size / expand_builtin_apply_args_1.  */
1553 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1554 if ((mode = apply_args_mode[regno]) != VOIDmode)
1556 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1557 if (size % align != 0)
1558 size = CEIL (size, align) * align;
1559 reg = gen_rtx_REG (mode, regno);
1560 emit_move_insn (reg, adjust_address (arguments, mode, size));
1561 use_reg (&call_fusage, reg);
1562 size += GET_MODE_SIZE (mode);
1565 /* Restore the structure value address unless this is passed as an
1566 "invisible" first argument. */
1567 size = GET_MODE_SIZE (Pmode);
1570 rtx value = gen_reg_rtx (Pmode);
1571 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1572 emit_move_insn (struct_value, value);
1573 if (REG_P (struct_value))
1574 use_reg (&call_fusage, struct_value);
1575 size += GET_MODE_SIZE (Pmode);
1578 /* All arguments and registers used for the call are set up by now! */
1579 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1581 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1582 and we don't want to load it into a register as an optimization,
1583 because prepare_call_address already did it if it should be done. */
1584 if (GET_CODE (function) != SYMBOL_REF)
1585 function = memory_address (FUNCTION_MODE, function);
1587 /* Generate the actual call instruction and save the return value. */
1588 #ifdef HAVE_untyped_call
1589 if (HAVE_untyped_call)
1590 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1591 result, result_vector (1, result)));
1594 #ifdef HAVE_call_value
1595 if (HAVE_call_value)
1599 /* Locate the unique return register. It is not possible to
1600 express a call that sets more than one return register using
1601 call_value; use untyped_call for that. In fact, untyped_call
1602 only needs to save the return registers in the given block. */
1603 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1604 if ((mode = apply_result_mode[regno]) != VOIDmode)
1606 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1608 valreg = gen_rtx_REG (mode, regno);
1611 emit_call_insn (GEN_CALL_VALUE (valreg,
1612 gen_rtx_MEM (FUNCTION_MODE, function),
1613 const0_rtx, NULL_RTX, const0_rtx));
1615 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1621 /* Find the CALL insn we just emitted, and attach the register usage
1623 call_insn = last_call_insn ();
1624 add_function_usage_to (call_insn, call_fusage);
1626 /* Restore the stack. */
1627 #ifdef HAVE_save_stack_nonlocal
1628 if (HAVE_save_stack_nonlocal)
1629 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1632 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1636 /* Return the address of the result block. */
1637 result = copy_addr_to_reg (XEXP (result, 0));
1638 return convert_memory_address (ptr_mode, result);
1641 /* Perform an untyped return. */
1644 expand_builtin_return (rtx result)
1646 int size, align, regno;
1647 enum machine_mode mode;
1649 rtx call_fusage = 0;
1651 result = convert_memory_address (Pmode, result);
/* Called for its side effect: make sure apply_result_mode is set up.  */
1653 apply_result_size ();
1654 result = gen_rtx_MEM (BLKmode, result);
1656 #ifdef HAVE_untyped_return
1657 if (HAVE_untyped_return)
/* The target knows how to restore all return registers itself.  */
1659 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1665 /* Restore the return value and note that each value is used. */
/* Generic path: reload each saved return register from the block,
   using the same layout as apply_result_size.  */
1667 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1668 if ((mode = apply_result_mode[regno]) != VOIDmode)
1670 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1671 if (size % align != 0)
1672 size = CEIL (size, align) * align;
1673 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1674 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate USEs in a side sequence so they can all be emitted
   just before the return, keeping the registers live.  */
1676 push_to_sequence (call_fusage);
1678 call_fusage = get_insns ();
1680 size += GET_MODE_SIZE (mode);
1683 /* Put the USE insns before the return. */
1684 emit_insn (call_fusage);
1686 /* Return whatever values was restored by jumping directly to the end
1688 expand_naked_return ();
1691 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
/* Map a front-end type to the __builtin_classify_type type class.  */
1693 static enum type_class
1694 type_to_class (tree type)
1696 switch (TREE_CODE (type))
1698 case VOID_TYPE: return void_type_class;
1699 case INTEGER_TYPE: return integer_type_class;
1700 case ENUMERAL_TYPE: return enumeral_type_class;
1701 case BOOLEAN_TYPE: return boolean_type_class;
1702 case POINTER_TYPE: return pointer_type_class;
1703 case REFERENCE_TYPE: return reference_type_class;
1704 case OFFSET_TYPE: return offset_type_class;
1705 case REAL_TYPE: return real_type_class;
1706 case COMPLEX_TYPE: return complex_type_class;
1707 case FUNCTION_TYPE: return function_type_class;
1708 case METHOD_TYPE: return method_type_class;
1709 case RECORD_TYPE: return record_type_class;
1711 case QUAL_UNION_TYPE: return union_type_class;
/* Character arrays with the string flag classify as strings.  */
1712 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1713 ? string_type_class : array_type_class);
1714 case LANG_TYPE: return lang_type_class;
1715 default: return no_type_class;
1719 /* Expand a call EXP to __builtin_classify_type. */
1722 expand_builtin_classify_type (tree exp)
/* With an argument, classify its type; with none, return no_type_class.  */
1724 if (call_expr_nargs (exp))
1725 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1726 return GEN_INT (no_type_class);
1729 /* This helper macro, meant to be used in mathfn_built_in below,
1730 determines which among a set of three builtin math functions is
1731 appropriate for a given type mode. The `F' and `L' cases are
1732 automatically generated from the `double' case. */
/* Sets FCODE/FCODEF/FCODEL (double/float/long double variants) for use
   by the type dispatch at the end of mathfn_built_in_1.  */
1733 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1734 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1735 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1736 fcodel = BUILT_IN_MATHFN##L ; break;
1737 /* Similar to above, but appends _R after any F/L suffix. */
1738 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1739 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1740 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1741 fcodel = BUILT_IN_MATHFN##L_R ; break;
1743 /* Return mathematic function equivalent to FN but operating directly
1744 on TYPE, if available. If IMPLICIT is true find the function in
1745 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1746 can't do the conversion, return zero. */
1749 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1751 tree const *const fn_arr
1752 = implicit ? implicit_built_in_decls : built_in_decls;
1753 enum built_in_function fcode, fcodef, fcodel;
/* Each CASE_MATHFN expands to the double/float/long-double cases of one
   math builtin and records the three variant codes; see the macro above.  */
1757 CASE_MATHFN (BUILT_IN_ACOS)
1758 CASE_MATHFN (BUILT_IN_ACOSH)
1759 CASE_MATHFN (BUILT_IN_ASIN)
1760 CASE_MATHFN (BUILT_IN_ASINH)
1761 CASE_MATHFN (BUILT_IN_ATAN)
1762 CASE_MATHFN (BUILT_IN_ATAN2)
1763 CASE_MATHFN (BUILT_IN_ATANH)
1764 CASE_MATHFN (BUILT_IN_CBRT)
1765 CASE_MATHFN (BUILT_IN_CEIL)
1766 CASE_MATHFN (BUILT_IN_CEXPI)
1767 CASE_MATHFN (BUILT_IN_COPYSIGN)
1768 CASE_MATHFN (BUILT_IN_COS)
1769 CASE_MATHFN (BUILT_IN_COSH)
1770 CASE_MATHFN (BUILT_IN_DREM)
1771 CASE_MATHFN (BUILT_IN_ERF)
1772 CASE_MATHFN (BUILT_IN_ERFC)
1773 CASE_MATHFN (BUILT_IN_EXP)
1774 CASE_MATHFN (BUILT_IN_EXP10)
1775 CASE_MATHFN (BUILT_IN_EXP2)
1776 CASE_MATHFN (BUILT_IN_EXPM1)
1777 CASE_MATHFN (BUILT_IN_FABS)
1778 CASE_MATHFN (BUILT_IN_FDIM)
1779 CASE_MATHFN (BUILT_IN_FLOOR)
1780 CASE_MATHFN (BUILT_IN_FMA)
1781 CASE_MATHFN (BUILT_IN_FMAX)
1782 CASE_MATHFN (BUILT_IN_FMIN)
1783 CASE_MATHFN (BUILT_IN_FMOD)
1784 CASE_MATHFN (BUILT_IN_FREXP)
1785 CASE_MATHFN (BUILT_IN_GAMMA)
1786 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1787 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1788 CASE_MATHFN (BUILT_IN_HYPOT)
1789 CASE_MATHFN (BUILT_IN_ILOGB)
1790 CASE_MATHFN (BUILT_IN_INF)
1791 CASE_MATHFN (BUILT_IN_ISINF)
1792 CASE_MATHFN (BUILT_IN_J0)
1793 CASE_MATHFN (BUILT_IN_J1)
1794 CASE_MATHFN (BUILT_IN_JN)
1795 CASE_MATHFN (BUILT_IN_LCEIL)
1796 CASE_MATHFN (BUILT_IN_LDEXP)
1797 CASE_MATHFN (BUILT_IN_LFLOOR)
1798 CASE_MATHFN (BUILT_IN_LGAMMA)
1799 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1800 CASE_MATHFN (BUILT_IN_LLCEIL)
1801 CASE_MATHFN (BUILT_IN_LLFLOOR)
1802 CASE_MATHFN (BUILT_IN_LLRINT)
1803 CASE_MATHFN (BUILT_IN_LLROUND)
1804 CASE_MATHFN (BUILT_IN_LOG)
1805 CASE_MATHFN (BUILT_IN_LOG10)
1806 CASE_MATHFN (BUILT_IN_LOG1P)
1807 CASE_MATHFN (BUILT_IN_LOG2)
1808 CASE_MATHFN (BUILT_IN_LOGB)
1809 CASE_MATHFN (BUILT_IN_LRINT)
1810 CASE_MATHFN (BUILT_IN_LROUND)
1811 CASE_MATHFN (BUILT_IN_MODF)
1812 CASE_MATHFN (BUILT_IN_NAN)
1813 CASE_MATHFN (BUILT_IN_NANS)
1814 CASE_MATHFN (BUILT_IN_NEARBYINT)
1815 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1816 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1817 CASE_MATHFN (BUILT_IN_POW)
1818 CASE_MATHFN (BUILT_IN_POWI)
1819 CASE_MATHFN (BUILT_IN_POW10)
1820 CASE_MATHFN (BUILT_IN_REMAINDER)
1821 CASE_MATHFN (BUILT_IN_REMQUO)
1822 CASE_MATHFN (BUILT_IN_RINT)
1823 CASE_MATHFN (BUILT_IN_ROUND)
1824 CASE_MATHFN (BUILT_IN_SCALB)
1825 CASE_MATHFN (BUILT_IN_SCALBLN)
1826 CASE_MATHFN (BUILT_IN_SCALBN)
1827 CASE_MATHFN (BUILT_IN_SIGNBIT)
1828 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1829 CASE_MATHFN (BUILT_IN_SIN)
1830 CASE_MATHFN (BUILT_IN_SINCOS)
1831 CASE_MATHFN (BUILT_IN_SINH)
1832 CASE_MATHFN (BUILT_IN_SQRT)
1833 CASE_MATHFN (BUILT_IN_TAN)
1834 CASE_MATHFN (BUILT_IN_TANH)
1835 CASE_MATHFN (BUILT_IN_TGAMMA)
1836 CASE_MATHFN (BUILT_IN_TRUNC)
1837 CASE_MATHFN (BUILT_IN_Y0)
1838 CASE_MATHFN (BUILT_IN_Y1)
1839 CASE_MATHFN (BUILT_IN_YN)
/* Dispatch on the main variant of TYPE to pick the matching precision.  */
1845 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1846 return fn_arr[fcode];
1847 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1848 return fn_arr[fcodef];
1849 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1850 return fn_arr[fcodel];
1855 /* Like mathfn_built_in_1(), but always use the implicit array. */
1858 mathfn_built_in (tree type, enum built_in_function fn)
1860 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1863 /* If errno must be maintained, expand the RTL to check if the result,
1864 TARGET, of a built-in function call, EXP, is NaN, and if so set
1868 expand_errno_check (tree exp, rtx target)
1870 rtx lab = gen_label_rtx ();
1872 /* Test the result; if it is NaN, set errno=EDOM because
1873 the argument was not in the domain. */
/* TARGET == TARGET is false only for NaN, so EQ jumps over the
   errno-setting code for any non-NaN result.  */
1874 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1875 NULL_RTX, NULL_RTX, lab);
1878 /* If this built-in doesn't throw an exception, set errno directly. */
1879 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1881 #ifdef GEN_ERRNO_RTX
/* Target-specific way to address errno, when provided.  */
1882 rtx errno_rtx = GEN_ERRNO_RTX;
/* Fallback: reference errno by name as a word-mode memory location.  */
1885 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1887 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1893 /* Make sure the library call isn't expanded as a tail call. */
1894 CALL_EXPR_TAILCALL (exp) = 0;
1896 /* We can't set errno=EDOM directly; let the library call do it.
1897 Pop the arguments right away in case the call gets deleted. */
1899 expand_call (exp, target, 0);
1904 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1905 Return NULL_RTX if a normal call should be emitted rather than expanding
1906 the function in-line. EXP is the expression that is a call to the builtin
1907 function; if convenient, the result should be placed in TARGET.
1908 SUBTARGET may be used as the target for computing one of EXP's operands. */
1911 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1913 optab builtin_optab;
1915 tree fndecl = get_callee_fndecl (exp);
1916 enum machine_mode mode;
1917 bool errno_set = false;
1920 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1923 arg = CALL_EXPR_ARG (exp, 0);
/* Map the builtin to its optab and note whether it can set errno
   (EDOM-style domain errors) for a NaN result.  */
1925 switch (DECL_FUNCTION_CODE (fndecl))
1927 CASE_FLT_FN (BUILT_IN_SQRT):
/* sqrt of a provably nonnegative argument cannot set errno.  */
1928 errno_set = ! tree_expr_nonnegative_p (arg);
1929 builtin_optab = sqrt_optab;
1931 CASE_FLT_FN (BUILT_IN_EXP):
1932 errno_set = true; builtin_optab = exp_optab; break;
1933 CASE_FLT_FN (BUILT_IN_EXP10):
1934 CASE_FLT_FN (BUILT_IN_POW10):
1935 errno_set = true; builtin_optab = exp10_optab; break;
1936 CASE_FLT_FN (BUILT_IN_EXP2):
1937 errno_set = true; builtin_optab = exp2_optab; break;
1938 CASE_FLT_FN (BUILT_IN_EXPM1):
1939 errno_set = true; builtin_optab = expm1_optab; break;
1940 CASE_FLT_FN (BUILT_IN_LOGB):
1941 errno_set = true; builtin_optab = logb_optab; break;
1942 CASE_FLT_FN (BUILT_IN_LOG):
1943 errno_set = true; builtin_optab = log_optab; break;
1944 CASE_FLT_FN (BUILT_IN_LOG10):
1945 errno_set = true; builtin_optab = log10_optab; break;
1946 CASE_FLT_FN (BUILT_IN_LOG2):
1947 errno_set = true; builtin_optab = log2_optab; break;
1948 CASE_FLT_FN (BUILT_IN_LOG1P):
1949 errno_set = true; builtin_optab = log1p_optab; break;
1950 CASE_FLT_FN (BUILT_IN_ASIN):
1951 builtin_optab = asin_optab; break;
1952 CASE_FLT_FN (BUILT_IN_ACOS):
1953 builtin_optab = acos_optab; break;
1954 CASE_FLT_FN (BUILT_IN_TAN):
1955 builtin_optab = tan_optab; break;
1956 CASE_FLT_FN (BUILT_IN_ATAN):
1957 builtin_optab = atan_optab; break;
1958 CASE_FLT_FN (BUILT_IN_FLOOR):
1959 builtin_optab = floor_optab; break;
1960 CASE_FLT_FN (BUILT_IN_CEIL):
1961 builtin_optab = ceil_optab; break;
1962 CASE_FLT_FN (BUILT_IN_TRUNC):
1963 builtin_optab = btrunc_optab; break;
1964 CASE_FLT_FN (BUILT_IN_ROUND):
1965 builtin_optab = round_optab; break;
1966 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1967 builtin_optab = nearbyint_optab;
1968 if (flag_trapping_math)
1970 /* Else fallthrough and expand as rint. */
1971 CASE_FLT_FN (BUILT_IN_RINT):
1972 builtin_optab = rint_optab; break;
1973 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1974 builtin_optab = significand_optab; break;
1979 /* Make a suitable register to place result in. */
1980 mode = TYPE_MODE (TREE_TYPE (exp));
/* With -fno-math-errno, or when the mode has no NaNs, there is no
   errno check to emit.  */
1982 if (! flag_errno_math || ! HONOR_NANS (mode))
1985 /* Before working hard, check whether the instruction is available. */
1986 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1988 target = gen_reg_rtx (mode);
1990 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1991 need to expand the argument again. This way, we will not perform
1992 side-effects more the once. */
1993 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1995 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1999 /* Compute into TARGET.
2000 Set TARGET to wherever the result comes back. */
2001 target = expand_unop (mode, builtin_optab, op0, target, 0);
/* Expansion succeeded: add the NaN/errno check if required.  */
2006 expand_errno_check (exp, target);
2008 /* Output the entire sequence. */
2009 insns = get_insns ();
2015 /* If we were unable to expand via the builtin, stop the sequence
2016 (without outputting the insns) and call to the library function
2017 with the stabilized argument list. */
2021 return expand_call (exp, target, target == const0_rtx);
2024 /* Expand a call to the builtin binary math functions (pow and atan2).
2025 Return NULL_RTX if a normal call should be emitted rather than expanding the
2026 function in-line. EXP is the expression that is a call to the builtin
2027 function; if convenient, the result should be placed in TARGET.
2028 SUBTARGET may be used as the target for computing one of EXP's
2032 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2034 optab builtin_optab;
2035 rtx op0, op1, insns;
2036 int op1_type = REAL_TYPE;
2037 tree fndecl = get_callee_fndecl (exp);
2039 enum machine_mode mode;
2040 bool errno_set = true;
/* ldexp/scalbn/scalbln take an integer second operand; every other
   function handled here takes two REAL_TYPE operands.  */
2042 switch (DECL_FUNCTION_CODE (fndecl))
2044 CASE_FLT_FN (BUILT_IN_SCALBN):
2045 CASE_FLT_FN (BUILT_IN_SCALBLN):
2046 CASE_FLT_FN (BUILT_IN_LDEXP):
2047 op1_type = INTEGER_TYPE;
2052 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2055 arg0 = CALL_EXPR_ARG (exp, 0);
2056 arg1 = CALL_EXPR_ARG (exp, 1);
/* Map the builtin function code onto the optab that implements it.  */
2058 switch (DECL_FUNCTION_CODE (fndecl))
2060 CASE_FLT_FN (BUILT_IN_POW):
2061 builtin_optab = pow_optab; break;
2062 CASE_FLT_FN (BUILT_IN_ATAN2):
2063 builtin_optab = atan2_optab; break;
2064 CASE_FLT_FN (BUILT_IN_SCALB):
/* scalb is only expanded in-line when the float format radix is 2.  */
2065 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2067 builtin_optab = scalb_optab; break;
2068 CASE_FLT_FN (BUILT_IN_SCALBN):
2069 CASE_FLT_FN (BUILT_IN_SCALBLN):
2070 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2072 /* Fall through... */
2073 CASE_FLT_FN (BUILT_IN_LDEXP):
2074 builtin_optab = ldexp_optab; break;
2075 CASE_FLT_FN (BUILT_IN_FMOD):
2076 builtin_optab = fmod_optab; break;
2077 CASE_FLT_FN (BUILT_IN_REMAINDER):
2078 CASE_FLT_FN (BUILT_IN_DREM):
2079 builtin_optab = remainder_optab; break;
2084 /* Make a suitable register to place result in. */
2085 mode = TYPE_MODE (TREE_TYPE (exp));
2087 /* Before working hard, check whether the instruction is available. */
2088 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2091 target = gen_reg_rtx (mode);
2093 if (! flag_errno_math || ! HONOR_NANS (mode))
2096 /* Always stabilize the argument list.  The arguments may be expanded
   twice (once here, once by the library fallback), so wrap them in
   SAVE_EXPRs to avoid duplicating side effects.  */
2097 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2098 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2100 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2101 op1 = expand_normal (arg1);
2105 /* Compute into TARGET.
2106 Set TARGET to wherever the result comes back. */
2107 target = expand_binop (mode, builtin_optab, op0, op1,
2108 target, 0, OPTAB_DIRECT);
2110 /* If we were unable to expand via the builtin, stop the sequence
2111 (without outputting the insns) and call to the library function
2112 with the stabilized argument list. */
2116 return expand_call (exp, target, target == const0_rtx);
2120 expand_errno_check (exp, target);
2122 /* Output the entire sequence. */
2123 insns = get_insns ();
2130 /* Expand a call to the builtin sin and cos math functions.
2131 Return NULL_RTX if a normal call should be emitted rather than expanding the
2132 function in-line. EXP is the expression that is a call to the builtin
2133 function; if convenient, the result should be placed in TARGET.
2134 SUBTARGET may be used as the target for computing one of EXP's
2138 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2140 optab builtin_optab;
2142 tree fndecl = get_callee_fndecl (exp);
2143 enum machine_mode mode;
2146 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2149 arg = CALL_EXPR_ARG (exp, 0);
/* Both sin and cos are first tried through the combined sincos optab.  */
2151 switch (DECL_FUNCTION_CODE (fndecl))
2153 CASE_FLT_FN (BUILT_IN_SIN):
2154 CASE_FLT_FN (BUILT_IN_COS):
2155 builtin_optab = sincos_optab; break;
2160 /* Make a suitable register to place result in. */
2161 mode = TYPE_MODE (TREE_TYPE (exp));
2163 /* Check if sincos insn is available, otherwise fallback
2164 to sin or cos insn. */
2165 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2166 switch (DECL_FUNCTION_CODE (fndecl))
2168 CASE_FLT_FN (BUILT_IN_SIN):
2169 builtin_optab = sin_optab; break;
2170 CASE_FLT_FN (BUILT_IN_COS):
2171 builtin_optab = cos_optab; break;
2176 /* Before working hard, check whether the instruction is available. */
2177 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2179 target = gen_reg_rtx (mode);
2181 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2182 need to expand the argument again. This way, we will not perform
2183 side-effects more the once. */
2184 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2186 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2190 /* Compute into TARGET.
2191 Set TARGET to wherever the result comes back. */
2192 if (builtin_optab == sincos_optab)
/* The sincos insn produces two values; select which output slot
   (sin or cos) receives TARGET and discard the other.  */
2196 switch (DECL_FUNCTION_CODE (fndecl))
2198 CASE_FLT_FN (BUILT_IN_SIN):
2199 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2201 CASE_FLT_FN (BUILT_IN_COS):
2202 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2207 gcc_assert (result);
2211 target = expand_unop (mode, builtin_optab, op0, target, 0);
2216 /* Output the entire sequence. */
2217 insns = get_insns ();
2223 /* If we were unable to expand via the builtin, stop the sequence
2224 (without outputting the insns) and call to the library function
2225 with the stabilized argument list. */
2229 target = expand_call (exp, target, target == const0_rtx);
2234 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2235 return an RTL instruction code that implements the functionality.
2236 If that isn't possible or available return CODE_FOR_nothing. */
2238 static enum insn_code
2239 interclass_mathfn_icode (tree arg, tree fndecl)
2241 bool errno_set = false;
2242 optab builtin_optab = 0;
2243 enum machine_mode mode;
2245 switch (DECL_FUNCTION_CODE (fndecl))
2247 CASE_FLT_FN (BUILT_IN_ILOGB):
/* ilogb can set errno (EDOM), unlike the classification builtins.  */
2248 errno_set = true; builtin_optab = ilogb_optab; break;
2249 CASE_FLT_FN (BUILT_IN_ISINF):
2250 builtin_optab = isinf_optab; break;
2251 case BUILT_IN_ISNORMAL:
2252 case BUILT_IN_ISFINITE:
2253 CASE_FLT_FN (BUILT_IN_FINITE):
2254 case BUILT_IN_FINITED32:
2255 case BUILT_IN_FINITED64:
2256 case BUILT_IN_FINITED128:
2257 case BUILT_IN_ISINFD32:
2258 case BUILT_IN_ISINFD64:
2259 case BUILT_IN_ISINFD128:
2260 /* These builtins have no optabs (yet). */
2266 /* There's no easy way to detect the case we need to set EDOM. */
2267 if (flag_errno_math && errno_set)
2268 return CODE_FOR_nothing;
2270 /* Optab mode depends on the mode of the input argument. */
2271 mode = TYPE_MODE (TREE_TYPE (arg));
2274 return optab_handler (builtin_optab, mode)->insn_code;
2275 return CODE_FOR_nothing;
2278 /* Expand a call to one of the builtin math functions that operate on
2279 floating point argument and output an integer result (ilogb, isinf,
2281 Return 0 if a normal call should be emitted rather than expanding the
2282 function in-line. EXP is the expression that is a call to the builtin
2283 function; if convenient, the result should be placed in TARGET.
2284 SUBTARGET may be used as the target for computing one of EXP's operands. */
2287 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2289 enum insn_code icode = CODE_FOR_nothing;
2291 tree fndecl = get_callee_fndecl (exp);
2292 enum machine_mode mode;
2295 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2298 arg = CALL_EXPR_ARG (exp, 0);
/* Ask the helper above whether the target has a direct insn for this.  */
2299 icode = interclass_mathfn_icode (arg, fndecl);
2300 mode = TYPE_MODE (TREE_TYPE (arg));
2302 if (icode != CODE_FOR_nothing)
2304 /* Make a suitable register to place result in. */
2306 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2307 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)))
2309 gcc_assert (insn_data[icode].operand[0].predicate
2310 (target, GET_MODE (target)));
2312 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2313 need to expand the argument again. This way, we will not perform
2314 side-effects more the once. */
2315 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2317 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
/* The insn expects the argument in the float mode of ARG; widen or
   narrow OP0 if it was expanded in a different mode.  */
2319 if (mode != GET_MODE (op0))
2320 op0 = convert_to_mode (mode, op0, 0);
2322 /* Compute into TARGET.
2323 Set TARGET to wherever the result comes back. */
2324 emit_unop_insn (icode, target, op0, UNKNOWN);
2331 /* Expand a call to the builtin sincos math function.
2332 Return NULL_RTX if a normal call should be emitted rather than expanding the
2333 function in-line. EXP is the expression that is a call to the builtin
2337 expand_builtin_sincos (tree exp)
2339 rtx op0, op1, op2, target1, target2;
2340 enum machine_mode mode;
2341 tree arg, sinp, cosp;
2343 location_t loc = EXPR_LOCATION (exp);
2345 if (!validate_arglist (exp, REAL_TYPE,
2346 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* sincos (arg, &sin_result, &cos_result).  */
2349 arg = CALL_EXPR_ARG (exp, 0);
2350 sinp = CALL_EXPR_ARG (exp, 1);
2351 cosp = CALL_EXPR_ARG (exp, 2);
2353 /* Make a suitable register to place result in. */
2354 mode = TYPE_MODE (TREE_TYPE (arg));
2356 /* Check if sincos insn is available, otherwise emit the call. */
2357 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2360 target1 = gen_reg_rtx (mode);
2361 target2 = gen_reg_rtx (mode);
2363 op0 = expand_normal (arg);
/* OP1/OP2 are the memory locations *sinp and *cosp to store into.  */
2364 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2365 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2367 /* Compute into target1 and target2.
2368 Set TARGET to wherever the result comes back. */
2369 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2370 gcc_assert (result);
2372 /* Move target1 and target2 to the memory locations indicated
2374 emit_move_insn (op1, target1);
2375 emit_move_insn (op2, target2);
2380 /* Expand a call to the internal cexpi builtin to the sincos math function.
2381 EXP is the expression that is a call to the builtin function; if convenient,
2382 the result should be placed in TARGET. SUBTARGET may be used as the target
2383 for computing one of EXP's operands. */
2386 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2388 tree fndecl = get_callee_fndecl (exp);
2390 enum machine_mode mode;
2392 location_t loc = EXPR_LOCATION (exp);
2394 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2397 arg = CALL_EXPR_ARG (exp, 0);
2398 type = TREE_TYPE (arg);
2399 mode = TYPE_MODE (TREE_TYPE (arg));
2401 /* Try expanding via a sincos optab, fall back to emitting a libcall
2402 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2403 is only generated from sincos, cexp or if we have either of them. */
2404 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2406 op1 = gen_reg_rtx (mode);
2407 op2 = gen_reg_rtx (mode);
2409 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2411 /* Compute into op1 and op2. */
2412 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2414 else if (TARGET_HAS_SINCOS)
2416 tree call, fn = NULL_TREE;
/* Pick the sincos variant matching the cexpi precision.  */
2420 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2421 fn = built_in_decls[BUILT_IN_SINCOSF];
2422 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2423 fn = built_in_decls[BUILT_IN_SINCOS];
2424 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2425 fn = built_in_decls[BUILT_IN_SINCOSL];
/* Allocate stack temporaries for sincos to store into and build
   trees for their addresses so a regular call can be emitted.  */
2429 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2430 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2431 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2432 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2433 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2434 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2436 /* Make sure not to fold the sincos call again. */
2437 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2438 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2439 call, 3, arg, top1, top2));
2443 tree call, fn = NULL_TREE, narg;
2444 tree ctype = build_complex_type (type);
2446 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2447 fn = built_in_decls[BUILT_IN_CEXPF];
2448 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2449 fn = built_in_decls[BUILT_IN_CEXP];
2450 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2451 fn = built_in_decls[BUILT_IN_CEXPL];
2455 /* If we don't have a decl for cexp create one. This is the
2456 friendliest fallback if the user calls __builtin_cexpi
2457 without full target C99 function support. */
2458 if (fn == NULL_TREE)
2461 const char *name = NULL;
2463 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2465 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2467 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2470 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2471 fn = build_fn_decl (name, fntype);
/* cexpi (x) == cexp (0 + x*i): build the pure-imaginary argument.  */
2474 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2475 build_real (type, dconst0), arg);
2477 /* Make sure not to fold the cexp call again. */
2478 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2479 return expand_expr (build_call_nary (ctype, call, 1, narg),
2480 target, VOIDmode, EXPAND_NORMAL);
2483 /* Now build the proper return type. */
2484 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2485 make_tree (TREE_TYPE (arg), op2),
2486 make_tree (TREE_TYPE (arg), op1)),
2487 target, VOIDmode, EXPAND_NORMAL);
2490 /* Conveniently construct a function call expression. FNDECL names the
2491 function to be called, N is the number of arguments, and the "..."
2492 parameters are the argument expressions. Unlike build_call_exr
2493 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2496 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2499 tree fntype = TREE_TYPE (fndecl);
2500 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2503 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2505 SET_EXPR_LOCATION (fn, loc);
/* Convenience wrapper: same as above with UNKNOWN_LOCATION.  */
2508 #define build_call_nofold(...) \
2509 build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
2511 /* Expand a call to one of the builtin rounding functions gcc defines
2512 as an extension (lfloor and lceil). As these are gcc extensions we
2513 do not need to worry about setting errno to EDOM.
2514 If expanding via optab fails, lower expression to (int)(floor(x)).
2515 EXP is the expression that is a call to the builtin function;
2516 if convenient, the result should be placed in TARGET. */
2519 expand_builtin_int_roundingfn (tree exp, rtx target)
2521 convert_optab builtin_optab;
2522 rtx op0, insns, tmp;
2523 tree fndecl = get_callee_fndecl (exp);
2524 enum built_in_function fallback_fn;
2525 tree fallback_fndecl;
2526 enum machine_mode mode;
2529 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2532 arg = CALL_EXPR_ARG (exp, 0);
/* Select the convert optab and, should it be unavailable, the plain
   floating-point rounding builtin to lower to instead.  */
2534 switch (DECL_FUNCTION_CODE (fndecl))
2536 CASE_FLT_FN (BUILT_IN_LCEIL):
2537 CASE_FLT_FN (BUILT_IN_LLCEIL):
2538 builtin_optab = lceil_optab;
2539 fallback_fn = BUILT_IN_CEIL;
2542 CASE_FLT_FN (BUILT_IN_LFLOOR):
2543 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2544 builtin_optab = lfloor_optab;
2545 fallback_fn = BUILT_IN_FLOOR;
2552 /* Make a suitable register to place result in. */
2553 mode = TYPE_MODE (TREE_TYPE (exp));
2555 target = gen_reg_rtx (mode);
2557 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2558 need to expand the argument again. This way, we will not perform
2559 side-effects more the once. */
2560 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2562 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2566 /* Compute into TARGET. */
2567 if (expand_sfix_optab (target, op0, builtin_optab))
2569 /* Output the entire sequence. */
2570 insns = get_insns ();
2576 /* If we were unable to expand via the builtin, stop the sequence
2577 (without outputting the insns). */
2580 /* Fall back to floating point rounding optab. */
2581 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2583 /* For non-C99 targets we may end up without a fallback fndecl here
2584 if the user called __builtin_lfloor directly. In this case emit
2585 a call to the floor/ceil variants nevertheless. This should result
2586 in the best user experience for not full C99 targets. */
2587 if (fallback_fndecl == NULL_TREE)
2590 const char *name = NULL;
/* Pick the libm name matching the builtin's precision (name
   assignments are in elided lines of this switch).  */
2592 switch (DECL_FUNCTION_CODE (fndecl))
2594 case BUILT_IN_LCEIL:
2595 case BUILT_IN_LLCEIL:
2598 case BUILT_IN_LCEILF:
2599 case BUILT_IN_LLCEILF:
2602 case BUILT_IN_LCEILL:
2603 case BUILT_IN_LLCEILL:
2606 case BUILT_IN_LFLOOR:
2607 case BUILT_IN_LLFLOOR:
2610 case BUILT_IN_LFLOORF:
2611 case BUILT_IN_LLFLOORF:
2614 case BUILT_IN_LFLOORL:
2615 case BUILT_IN_LLFLOORL:
2622 fntype = build_function_type_list (TREE_TYPE (arg),
2623 TREE_TYPE (arg), NULL_TREE);
2624 fallback_fndecl = build_fn_decl (name, fntype);
2627 exp = build_call_nofold (fallback_fndecl, 1, arg);
2629 tmp = expand_normal (exp);
2631 /* Truncate the result of floating point optab to integer
2632 via expand_fix (). */
2633 target = gen_reg_rtx (mode);
2634 expand_fix (target, tmp, 0);
2639 /* Expand a call to one of the builtin math functions doing integer
2641 Return 0 if a normal call should be emitted rather than expanding the
2642 function in-line. EXP is the expression that is a call to the builtin
2643 function; if convenient, the result should be placed in TARGET. */
2646 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2648 convert_optab builtin_optab;
2650 tree fndecl = get_callee_fndecl (exp);
2652 enum machine_mode mode;
2654 /* There's no easy way to detect the case we need to set EDOM. */
2655 if (flag_errno_math)
2658 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2661 arg = CALL_EXPR_ARG (exp, 0);
2663 switch (DECL_FUNCTION_CODE (fndecl))
2665 CASE_FLT_FN (BUILT_IN_LRINT):
2666 CASE_FLT_FN (BUILT_IN_LLRINT):
2667 builtin_optab = lrint_optab; break;
2668 CASE_FLT_FN (BUILT_IN_LROUND):
2669 CASE_FLT_FN (BUILT_IN_LLROUND):
2670 builtin_optab = lround_optab; break;
2675 /* Make a suitable register to place result in. */
2676 mode = TYPE_MODE (TREE_TYPE (exp));
2678 target = gen_reg_rtx (mode);
2680 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2681 need to expand the argument again. This way, we will not perform
2682 side-effects more the once. */
2683 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2685 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
/* Try the float-to-integer conversion optab directly.  */
2689 if (expand_sfix_optab (target, op0, builtin_optab))
2691 /* Output the entire sequence. */
2692 insns = get_insns ();
2698 /* If we were unable to expand via the builtin, stop the sequence
2699 (without outputting the insns) and call to the library function
2700 with the stabilized argument list. */
2703 target = expand_call (exp, target, target == const0_rtx);
2708 /* To evaluate powi(x,n), the floating point value x raised to the
2709 constant integer exponent n, we use a hybrid algorithm that
2710 combines the "window method" with look-up tables. For an
2711 introduction to exponentiation algorithms and "addition chains",
2712 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2713 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2714 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2715 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2717 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2718 multiplications to inline before calling the system library's pow
2719 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2720 so this default never requires calling pow, powf or powl. */
2722 #ifndef POWI_MAX_MULTS
2723 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2726 /* The size of the "optimal power tree" lookup table. All
2727 exponents less than this value are simply looked up in the
2728 powi_table below. This threshold is also used to size the
2729 cache of pseudo registers that hold intermediate results. */
2730 #define POWI_TABLE_SIZE 256
2732 /* The size, in bits of the window, used in the "window method"
2733 exponentiation algorithm. This is equivalent to a radix of
2734 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2735 #define POWI_WINDOW_SIZE 3
2737 /* The following table is an efficient representation of an
2738 "optimal power tree". For each value, i, the corresponding
2739 value, j, in the table states than an optimal evaluation
2740 sequence for calculating pow(x,i) can be found by evaluating
2741 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2742 100 integers is given in Knuth's "Seminumerical algorithms". */
2744 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2746 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2747 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2748 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2749 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2750 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2751 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2752 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2753 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2754 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2755 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2756 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2757 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2758 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2759 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2760 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2761 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2762 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2763 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2764 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2765 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2766 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2767 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2768 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2769 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2770 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2771 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2772 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2773 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2774 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2775 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2776 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2777 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2781 /* Return the number of multiplications required to calculate
2782 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2783 subroutine of powi_cost. CACHE is an array indicating
2784 which exponents have already been calculated. */
2787 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2789 /* If we've already calculated this exponent, then this evaluation
2790 doesn't require any additional multiplications. */
/* Recurse on the addition-chain split from powi_table:
   cost(n) = cost(n - powi_table[n]) + cost(powi_table[n]) + 1.  */
2795 return powi_lookup_cost (n - powi_table[n], cache)
2796 + powi_lookup_cost (powi_table[n], cache) + 1;
2799 /* Return the number of multiplications required to calculate
2800 powi(x,n) for an arbitrary x, given the exponent N. This
2801 function needs to be kept in sync with expand_powi below. */
2804 powi_cost (HOST_WIDE_INT n)
2806 bool cache[POWI_TABLE_SIZE];
2807 unsigned HOST_WIDE_INT digit;
2808 unsigned HOST_WIDE_INT val;
2814 /* Ignore the reciprocal when calculating the cost. */
2815 val = (n < 0) ? -n : n;
2817 /* Initialize the exponent cache. */
2818 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
/* Window method: peel off POWI_WINDOW_SIZE bits at a time until the
   residual exponent fits in the lookup table.  */
2823 while (val >= POWI_TABLE_SIZE)
2827 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2828 result += powi_lookup_cost (digit, cache)
2829 + POWI_WINDOW_SIZE + 1;
2830 val >>= POWI_WINDOW_SIZE;
2839 return result + powi_lookup_cost (val, cache);
2842 /* Recursive subroutine of expand_powi. This function takes the array,
2843 CACHE, of already calculated exponents and an exponent N and returns
2844 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2847 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2849 unsigned HOST_WIDE_INT digit;
/* Small exponents: split per the optimal power tree in powi_table.  */
2853 if (n < POWI_TABLE_SIZE)
2858 target = gen_reg_rtx (mode);
2861 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2862 op1 = expand_powi_1 (mode, powi_table[n], cache);
/* Odd large exponents: strip the low POWI_WINDOW_SIZE bits.  */
2866 target = gen_reg_rtx (mode);
2867 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2868 op0 = expand_powi_1 (mode, n - digit, cache);
2869 op1 = expand_powi_1 (mode, digit, cache);
/* Even exponents: square the half power (op1 setup elided here).  */
2873 target = gen_reg_rtx (mode);
2874 op0 = expand_powi_1 (mode, n >> 1, cache);
2878 result = expand_mult (mode, op0, op1, target, 0);
2879 if (result != target)
2880 emit_move_insn (target, result);
2884 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2885 floating point operand in mode MODE, and N is the exponent. This
2886 function needs to be kept in sync with powi_cost above. */
2889 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2891 rtx cache[POWI_TABLE_SIZE];
/* x**0 == 1 for any x, including 0 and NaN under -ffast-math rules.  */
2895 return CONST1_RTX (mode);
2897 memset (cache, 0, sizeof (cache));
2900 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2902 /* If the original exponent was negative, reciprocate the result. */
2904 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2905 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2910 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2911 a normal call should be emitted rather than expanding the function
2912 in-line. EXP is the expression that is a call to the builtin
2913 function; if convenient, the result should be placed in TARGET. */
2916 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2920 tree type = TREE_TYPE (exp);
2921 REAL_VALUE_TYPE cint, c, c2;
2924 enum machine_mode mode = TYPE_MODE (type);
2926 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2929 arg0 = CALL_EXPR_ARG (exp, 0);
2930 arg1 = CALL_EXPR_ARG (exp, 1);
/* Non-constant exponent: defer to the generic binary-mathfn path.  */
2932 if (TREE_CODE (arg1) != REAL_CST
2933 || TREE_OVERFLOW (arg1))
2934 return expand_builtin_mathfn_2 (exp, target, subtarget);
2936 /* Handle constant exponents. */
2938 /* For integer valued exponents we can expand to an optimal multiplication
2939 sequence using expand_powi. */
2940 c = TREE_REAL_CST (arg1);
2941 n = real_to_integer (&c);
2942 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
/* Exponents -1..2 are always cheap; larger integer exponents are
   inlined only under -funsafe-math-optimizations when the
   multiplication count stays within POWI_MAX_MULTS.  */
2943 if (real_identical (&c, &cint)
2944 && ((n >= -1 && n <= 2)
2945 || (flag_unsafe_math_optimizations
2946 && optimize_insn_for_speed_p ()
2947 && powi_cost (n) <= POWI_MAX_MULTS)))
2949 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2952 op = force_reg (mode, op);
2953 op = expand_powi (op, mode, n);
2958 narg0 = builtin_save_expr (arg0);
2960 /* If the exponent is not integer valued, check if it is half of an integer.
2961 In this case we can expand to sqrt (x) * x**(n/2). */
2962 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2963 if (fn != NULL_TREE)
2965 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2966 n = real_to_integer (&c2);
2967 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2968 if (real_identical (&c2, &cint)
2969 && ((flag_unsafe_math_optimizations
2970 && optimize_insn_for_speed_p ()
2971 && powi_cost (n/2) <= POWI_MAX_MULTS)
2974 tree call_expr = build_call_nofold (fn, 1, narg0);
2975 /* Use expand_expr in case the newly built call expression
2976 was folded to a non-call. */
2977 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2980 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2981 op2 = force_reg (mode, op2);
2982 op2 = expand_powi (op2, mode, abs (n / 2));
2983 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2984 0, OPTAB_LIB_WIDEN);
2985 /* If the original exponent was negative, reciprocate the
2988 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2989 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2995 /* Try if the exponent is a third of an integer. In this case
2996 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2997 different from pow (x, 1./3.) due to rounding and behavior
2998 with negative x we need to constrain this transformation to
2999 unsafe math and positive x or finite math. */
3000 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3002 && flag_unsafe_math_optimizations
3003 && (tree_expr_nonnegative_p (arg0)
3004 || !HONOR_NANS (mode)))
3006 REAL_VALUE_TYPE dconst3;
3007 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
/* Round 3*c to the nearest integer N and verify that N/3
   round-trips exactly back to C, i.e. C really is N/3.  */
3008 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3009 real_round (&c2, mode, &c2);
3010 n = real_to_integer (&c2);
3011 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3012 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3013 real_convert (&c2, mode, &c2);
3014 if (real_identical (&c2, &c)
3015 && ((optimize_insn_for_speed_p ()
3016 && powi_cost (n/3) <= POWI_MAX_MULTS)
3019 tree call_expr = build_call_nofold (fn, 1,narg0);
3020 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3021 if (abs (n) % 3 == 2)
3022 op = expand_simple_binop (mode, MULT, op, op, op,
3023 0, OPTAB_LIB_WIDEN);
3026 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3027 op2 = force_reg (mode, op2);
3028 op2 = expand_powi (op2, mode, abs (n / 3));
3029 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3030 0, OPTAB_LIB_WIDEN);
3031 /* If the original exponent was negative, reciprocate the
3034 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3035 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3041 /* Fall back to optab expansion. */
3042 return expand_builtin_mathfn_2 (exp, target, subtarget);
3045 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3046 a normal call should be emitted rather than expanding the function
3047 in-line. EXP is the expression that is a call to the builtin
3048 function; if convenient, the result should be placed in TARGET. */
3051 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3055 enum machine_mode mode;
3056 enum machine_mode mode2;
3058 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3061 arg0 = CALL_EXPR_ARG (exp, 0);
3062 arg1 = CALL_EXPR_ARG (exp, 1);
3063 mode = TYPE_MODE (TREE_TYPE (exp));
3065 /* Handle constant power. */
3067 if (TREE_CODE (arg1) == INTEGER_CST
3068 && !TREE_OVERFLOW (arg1))
3070 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3072 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3073 Otherwise, check the number of multiplications required. */
/* The HIGH-word test ensures the constant actually fits in a
   HOST_WIDE_INT (non-negative or sign-extended negative).  */
3074 if ((TREE_INT_CST_HIGH (arg1) == 0
3075 || TREE_INT_CST_HIGH (arg1) == -1)
3076 && ((n >= -1 && n <= 2)
3077 || (optimize_insn_for_speed_p ()
3078 && powi_cost (n) <= POWI_MAX_MULTS)))
3080 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3081 op0 = force_reg (mode, op0);
3082 return expand_powi (op0, mode, n);
3086 /* Emit a libcall to libgcc. */
3088 /* Mode of the 2nd argument must match that of an int. */
3089 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3091 if (target == NULL_RTX)
3092 target = gen_reg_rtx (mode);
3094 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3095 if (GET_MODE (op0) != mode)
3096 op0 = convert_to_mode (mode, op0, 0);
3097 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3098 if (GET_MODE (op1) != mode2)
3099 op1 = convert_to_mode (mode2, op1, 0);
3101 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3102 target, LCT_CONST, mode, 2,
3103 op0, mode, op1, mode2);
3108 /* Expand expression EXP which is a call to the strlen builtin. Return
3109 NULL_RTX if we failed the caller should emit a normal call, otherwise
3110 try to get the result in TARGET, if convenient. */
3113 expand_builtin_strlen (tree exp, rtx target,
3114 enum machine_mode target_mode)
3116 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3122 tree src = CALL_EXPR_ARG (exp, 0);
3123 rtx result, src_reg, char_rtx, before_strlen;
3124 enum machine_mode insn_mode = target_mode, char_mode;
3125 enum insn_code icode = CODE_FOR_nothing;
3128 /* If the length can be computed at compile-time, return it. */
3129 len = c_strlen (src, 0);
3131 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3133 /* If the length can be computed at compile-time and is constant
3134 integer, but there are side-effects in src, evaluate
3135 src for side-effects, then return len.
3136 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3137 can be optimized into: i++; x = 3; */
3138 len = c_strlen (src, 1);
3139 if (len && TREE_CODE (len) == INTEGER_CST)
3141 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3142 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3145 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3147 /* If SRC is not a pointer type, don't do this operation inline. */
3151 /* Bail out if we can't compute strlen in the right mode. */
/* Walk up through wider integer modes looking for a strlen insn.  */
3152 while (insn_mode != VOIDmode)
3154 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3155 if (icode != CODE_FOR_nothing)
3158 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3160 if (insn_mode == VOIDmode)
3163 /* Make a place to write the result of the instruction. */
3167 && GET_MODE (result) == insn_mode
3168 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3169 result = gen_reg_rtx (insn_mode);
3171 /* Make a place to hold the source address. We will not expand
3172 the actual source until we are sure that the expansion will
3173 not fail -- there are trees that cannot be expanded twice. */
3174 src_reg = gen_reg_rtx (Pmode);
3176 /* Mark the beginning of the strlen sequence so we can emit the
3177 source operand later. */
3178 before_strlen = get_last_insn ();
/* The strlen pattern's operand 2 is the character to scan for;
   for strlen proper this is always the NUL character (0).  */
3180 char_rtx = const0_rtx;
3181 char_mode = insn_data[(int) icode].operand[2].mode;
3182 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3184 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3186 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3187 char_rtx, GEN_INT (align));
3192 /* Now that we are assured of success, expand the source. */
3194 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3196 emit_move_insn (src_reg, pat);
/* Splice the deferred source computation in front of the strlen
   insns that were emitted above.  */
3201 emit_insn_after (pat, before_strlen);
3203 emit_insn_before (pat, get_insns ());
3205 /* Return the value in the proper mode for this function. */
3206 if (GET_MODE (result) == target_mode)
3208 else if (target != 0)
3209 convert_move (target, result, 0);
3211 target = convert_to_mode (target_mode, result, 0);
3217 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3218 bytes from constant string DATA + OFFSET and return it as target
3222 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3223 enum machine_mode mode)
3225 const char *str = (const char *) data;
/* The assertion guarantees the read stays inside the NUL-terminated
   string, including its terminator.  */
3227 gcc_assert (offset >= 0
3228 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3229 <= strlen (str) + 1));
3231 return c_readstr (str + offset, mode);
3234 /* Expand a call EXP to the memcpy builtin.
3235 Return NULL_RTX if we failed, the caller should emit a normal call,
3236 otherwise try to get the result in TARGET, if convenient (and in
3237 mode MODE if that's convenient). */
3240 expand_builtin_memcpy (tree exp, rtx target)
3242 if (!validate_arglist (exp,
3243 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3247 tree dest = CALL_EXPR_ARG (exp, 0);
3248 tree src = CALL_EXPR_ARG (exp, 1);
3249 tree len = CALL_EXPR_ARG (exp, 2);
3250 const char *src_str;
3251 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3252 unsigned int dest_align
3253 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3254 rtx dest_mem, src_mem, dest_addr, len_rtx;
3255 HOST_WIDE_INT expected_size = -1;
3256 unsigned int expected_align = 0;
3258 /* If DEST is not a pointer type, call the normal function. */
3259 if (dest_align == 0)
3262 /* If either SRC is not a pointer type, don't do this
3263 operation in-line. */
/* Value profiling may provide a better alignment/size estimate for the
   block operation.  */
3267 if (currently_expanding_gimple_stmt)
3268 stringop_block_profile (currently_expanding_gimple_stmt,
3269 &expected_align, &expected_size);
3271 if (expected_align < dest_align)
3272 expected_align = dest_align;
3273 dest_mem = get_memory_rtx (dest, len);
3274 set_mem_align (dest_mem, dest_align);
3275 len_rtx = expand_normal (len);
3276 src_str = c_getstr (src);
3278 /* If SRC is a string constant and block move would be done
3279 by pieces, we can avoid loading the string from memory
3280 and only store the computed constants. */
3282 && CONST_INT_P (len_rtx)
3283 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3284 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3285 CONST_CAST (char *, src_str),
3288 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3289 builtin_memcpy_read_str,
3290 CONST_CAST (char *, src_str),
3291 dest_align, false, 0);
3292 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3293 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3297 src_mem = get_memory_rtx (src, len);
3298 set_mem_align (src_mem, src_align);
3300 /* Copy word part most expediently. */
3301 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3302 CALL_EXPR_TAILCALL (exp)
3303 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3304 expected_align, expected_size);
3308 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3309 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3315 /* Expand a call EXP to the mempcpy builtin.
3316 Return NULL_RTX if we failed; the caller should emit a normal call,
3317 otherwise try to get the result in TARGET, if convenient (and in
3318 mode MODE if that's convenient). If ENDP is 0 return the
3319 destination pointer, if ENDP is 1 return the end pointer ala
3320 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3324 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3326 if (!validate_arglist (exp,
3327 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3331 tree dest = CALL_EXPR_ARG (exp, 0);
3332 tree src = CALL_EXPR_ARG (exp, 1);
3333 tree len = CALL_EXPR_ARG (exp, 2);
/* mempcpy returns the end pointer, hence endp == 1.  */
3334 return expand_builtin_mempcpy_args (dest, src, len,
3335 target, mode, /*endp=*/ 1);
3339 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3340 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3341 so that this can also be called without constructing an actual CALL_EXPR.
3342 The other arguments and return value are the same as for
3343 expand_builtin_mempcpy. */
3346 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3347 rtx target, enum machine_mode mode, int endp)
3349 /* If return value is ignored, transform mempcpy into memcpy. */
3350 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3352 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3353 tree result = build_call_nofold (fn, 3, dest, src, len);
3354 return expand_expr (result, target, mode, EXPAND_NORMAL);
3358 const char *src_str;
3359 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3360 unsigned int dest_align
3361 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3362 rtx dest_mem, src_mem, len_rtx;
3364 /* If either SRC or DEST is not a pointer type, don't do this
3365 operation in-line. */
3366 if (dest_align == 0 || src_align == 0)
3369 /* If LEN is not constant, call the normal function. */
3370 if (! host_integerp (len, 1))
3373 len_rtx = expand_normal (len);
3374 src_str = c_getstr (src);
3376 /* If SRC is a string constant and block move would be done
3377 by pieces, we can avoid loading the string from memory
3378 and only store the computed constants. */
3380 && CONST_INT_P (len_rtx)
3381 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3382 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3383 CONST_CAST (char *, src_str),
3386 dest_mem = get_memory_rtx (dest, len);
3387 set_mem_align (dest_mem, dest_align);
/* ENDP is forwarded so store_by_pieces returns the start or end
   pointer as the caller requested.  */
3388 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3389 builtin_memcpy_read_str,
3390 CONST_CAST (char *, src_str),
3391 dest_align, false, endp);
3392 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3393 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3397 if (CONST_INT_P (len_rtx)
3398 && can_move_by_pieces (INTVAL (len_rtx),
3399 MIN (dest_align, src_align)))
3401 dest_mem = get_memory_rtx (dest, len);
3402 set_mem_align (dest_mem, dest_align);
3403 src_mem = get_memory_rtx (src, len);
3404 set_mem_align (src_mem, src_align);
3405 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3406 MIN (dest_align, src_align), endp);
3407 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3408 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Provide fallbacks when the target machine description has no movstr
   pattern.  */
3417 # define HAVE_movstr 0
3418 # define CODE_FOR_movstr CODE_FOR_nothing
3421 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3422 we failed; the caller should emit a normal call, otherwise try to
3423 get the result in TARGET, if convenient. If ENDP is 0 return the
3424 destination pointer, if ENDP is 1 return the end pointer ala
3425 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3429 expand_movstr (tree dest, tree src, rtx target, int endp)
3435 const struct insn_data * data;
3440 dest_mem = get_memory_rtx (dest, NULL);
3441 src_mem = get_memory_rtx (src, NULL);
3444 target = force_reg (Pmode, XEXP (dest_mem, 0));
3445 dest_mem = replace_equiv_address (dest_mem, target);
3446 end = gen_reg_rtx (Pmode);
3450 if (target == 0 || target == const0_rtx)
3452 end = gen_reg_rtx (Pmode);
/* Adapt END to the mode the movstr pattern's first operand expects.  */
3460 data = insn_data + CODE_FOR_movstr;
3462 if (data->operand[0].mode != VOIDmode)
3463 end = gen_lowpart (data->operand[0].mode, end);
3465 insn = data->genfun (end, dest_mem, src_mem);
3471 /* movstr is supposed to set end to the address of the NUL
3472 terminator. If the caller requested a mempcpy-like return value,
3474 if (endp == 1 && target != const0_rtx)
3476 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3477 emit_move_insn (target, force_operand (tem, NULL_RTX));
3483 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3484 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3485 try to get the result in TARGET, if convenient (and in mode MODE if that's
3489 expand_builtin_strcpy (tree exp, rtx target)
3491 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3493 tree dest = CALL_EXPR_ARG (exp, 0);
3494 tree src = CALL_EXPR_ARG (exp, 1);
3495 return expand_builtin_strcpy_args (dest, src, target);
3500 /* Helper function to do the actual work for expand_builtin_strcpy. The
3501 arguments to the builtin_strcpy call DEST and SRC are broken out
3502 so that this can also be called without constructing an actual CALL_EXPR.
3503 The other arguments and return value are the same as for
3504 expand_builtin_strcpy. */
3507 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
/* strcpy returns the destination pointer, hence endp == 0.  */
3509 return expand_movstr (dest, src, target, /*endp=*/0);
3512 /* Expand a call EXP to the stpcpy builtin.
3513 Return NULL_RTX if we failed; the caller should emit a normal call,
3514 otherwise try to get the result in TARGET, if convenient (and in
3515 mode MODE if that's convenient). */
3518 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3521 location_t loc = EXPR_LOCATION (exp);
3523 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3526 dst = CALL_EXPR_ARG (exp, 0);
3527 src = CALL_EXPR_ARG (exp, 1);
3529 /* If return value is ignored, transform stpcpy into strcpy. */
3530 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3532 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3533 tree result = build_call_nofold (fn, 2, dst, src);
3534 return expand_expr (result, target, mode, EXPAND_NORMAL);
3541 /* Ensure we get an actual string whose length can be evaluated at
3542 compile-time, not an expression containing a string. This is
3543 because the latter will potentially produce pessimized code
3544 when used to produce the return value. */
3545 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3546 return expand_movstr (dst, src, target, /*endp=*/2);
/* stpcpy copies strlen (src) + 1 bytes; expand as mempcpy of LEN + 1.  */
3548 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3549 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3550 target, mode, /*endp=*/2);
3555 if (TREE_CODE (len) == INTEGER_CST)
3557 rtx len_rtx = expand_normal (len);
3559 if (CONST_INT_P (len_rtx))
3561 ret = expand_builtin_strcpy_args (dst, src, target);
3567 if (mode != VOIDmode)
3568 target = gen_reg_rtx (mode);
3570 target = gen_reg_rtx (GET_MODE (ret));
3572 if (GET_MODE (target) != GET_MODE (ret))
3573 ret = gen_lowpart (GET_MODE (target), ret);
/* Compute the end pointer from the start pointer plus the constant
   length.  */
3575 ret = plus_constant (ret, INTVAL (len_rtx));
3576 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3584 return expand_movstr (dst, src, target, /*endp=*/2);
3588 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3589 bytes from constant string DATA + OFFSET and return it as target
3593 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3594 enum machine_mode mode)
3596 const char *str = (const char *) data;
/* Past the end of the string strncpy pads with zeros.  */
3598 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3601 return c_readstr (str + offset, mode);
3604 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3605 NULL_RTX if we failed; the caller should emit a normal call. */
3608 expand_builtin_strncpy (tree exp, rtx target)
3610 location_t loc = EXPR_LOCATION (exp);
3612 if (validate_arglist (exp,
3613 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3615 tree dest = CALL_EXPR_ARG (exp, 0);
3616 tree src = CALL_EXPR_ARG (exp, 1);
3617 tree len = CALL_EXPR_ARG (exp, 2);
3618 tree slen = c_strlen (src, 1);
3620 /* We must be passed a constant len and src parameter. */
3621 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
/* SLEN becomes strlen (src) + 1 to account for the NUL terminator.  */
3624 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3626 /* We're required to pad with trailing zeros if the requested
3627 len is greater than strlen(s2)+1. In that case try to
3628 use store_by_pieces, if it fails, punt. */
3629 if (tree_int_cst_lt (slen, len))
3631 unsigned int dest_align
3632 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3633 const char *p = c_getstr (src);
3636 if (!p || dest_align == 0 || !host_integerp (len, 1)
3637 || !can_store_by_pieces (tree_low_cst (len, 1),
3638 builtin_strncpy_read_str,
3639 CONST_CAST (char *, p),
3643 dest_mem = get_memory_rtx (dest, len);
3644 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3645 builtin_strncpy_read_str,
3646 CONST_CAST (char *, p), dest_align, false, 0);
3647 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3648 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3655 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3656 bytes from constant string DATA + OFFSET and return it as target
3660 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3661 enum machine_mode mode)
3663 const char *c = (const char *) data;
/* Build a MODE-sized buffer filled with the single fill byte *C; OFFSET
   is irrelevant because every byte is the same.  */
3664 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3666 memset (p, *c, GET_MODE_SIZE (mode));
3668 return c_readstr (p, mode);
3671 /* Callback routine for store_by_pieces. Return the RTL of a register
3672 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3673 char value given in the RTL register data. For example, if mode is
3674 4 bytes wide, return the RTL for 0x01010101*data. */
3677 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3678 enum machine_mode mode)
3684 size = GET_MODE_SIZE (mode);
/* COEFF is the constant 0x0101...01 of MODE's width; multiplying the
   byte value by it replicates the byte into every byte position.  */
3688 p = XALLOCAVEC (char, size);
3689 memset (p, 1, size);
3690 coeff = c_readstr (p, mode);
3692 target = convert_to_mode (mode, (rtx) data, 1);
3693 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3694 return force_reg (mode, target);
3697 /* Expand expression EXP, which is a call to the memset builtin. Return
3698 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3699 try to get the result in TARGET, if convenient (and in mode MODE if that's
3703 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3705 if (!validate_arglist (exp,
3706 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3710 tree dest = CALL_EXPR_ARG (exp, 0);
3711 tree val = CALL_EXPR_ARG (exp, 1);
3712 tree len = CALL_EXPR_ARG (exp, 2);
3713 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3717 /* Helper function to do the actual work for expand_builtin_memset. The
3718 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3719 so that this can also be called without constructing an actual CALL_EXPR.
3720 The other arguments and return value are the same as for
3721 expand_builtin_memset. */
3724 expand_builtin_memset_args (tree dest, tree val, tree len,
3725 rtx target, enum machine_mode mode, tree orig_exp)
3728 enum built_in_function fcode;
3730 unsigned int dest_align;
3731 rtx dest_mem, dest_addr, len_rtx;
3732 HOST_WIDE_INT expected_size = -1;
3733 unsigned int expected_align = 0;
3735 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3737 /* If DEST is not a pointer type, don't do this operation in-line. */
3738 if (dest_align == 0)
/* Value profiling may supply better alignment/size expectations.  */
3741 if (currently_expanding_gimple_stmt)
3742 stringop_block_profile (currently_expanding_gimple_stmt,
3743 &expected_align, &expected_size);
3745 if (expected_align < dest_align)
3746 expected_align = dest_align;
3748 /* If the LEN parameter is zero, return DEST. */
3749 if (integer_zerop (len))
3751 /* Evaluate and ignore VAL in case it has side-effects. */
3752 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3753 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3756 /* Stabilize the arguments in case we fail. */
3757 dest = builtin_save_expr (dest);
3758 val = builtin_save_expr (val);
3759 len = builtin_save_expr (len);
3761 len_rtx = expand_normal (len);
3762 dest_mem = get_memory_rtx (dest, len);
/* Non-constant fill value: replicate the runtime byte.  */
3764 if (TREE_CODE (val) != INTEGER_CST)
3768 val_rtx = expand_normal (val);
3769 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3772 /* Assume that we can memset by pieces if we can store
3773 * the coefficients by pieces (in the required modes).
3774 * We can't pass builtin_memset_gen_str as that emits RTL. */
3776 if (host_integerp (len, 1)
3777 && can_store_by_pieces (tree_low_cst (len, 1),
3778 builtin_memset_read_str, &c, dest_align,
3781 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3783 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3784 builtin_memset_gen_str, val_rtx, dest_align,
3787 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3788 dest_align, expected_align,
3792 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3793 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Constant fill value: extract the byte; bail to the library call if
   it cannot be represented as a target char.  */
3797 if (target_char_cast (val, &c))
3802 if (host_integerp (len, 1)
3803 && can_store_by_pieces (tree_low_cst (len, 1),
3804 builtin_memset_read_str, &c, dest_align,
3806 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3807 builtin_memset_read_str, &c, dest_align, true, 0);
3808 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3809 dest_align, expected_align,
3813 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3814 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Fill value is zero: use the block-clear expander.  */
3818 set_mem_align (dest_mem, dest_align);
3819 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3820 CALL_EXPR_TAILCALL (orig_exp)
3821 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3822 expected_align, expected_size);
3826 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3827 dest_addr = convert_memory_address (ptr_mode, dest_addr);
/* Inline expansion failed: emit an explicit call to the original
   builtin (memset or bzero), preserving tail-call status.  */
3833 fndecl = get_callee_fndecl (orig_exp);
3834 fcode = DECL_FUNCTION_CODE (fndecl);
3835 if (fcode == BUILT_IN_MEMSET)
3836 fn = build_call_nofold (fndecl, 3, dest, val, len);
3837 else if (fcode == BUILT_IN_BZERO)
3838 fn = build_call_nofold (fndecl, 2, dest, len);
3841 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3842 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3843 return expand_call (fn, target, target == const0_rtx);
3846 /* Expand expression EXP, which is a call to the bzero builtin. Return
3847 NULL_RTX if we failed; the caller should emit a normal call. */
3850 expand_builtin_bzero (tree exp)
3853 location_t loc = EXPR_LOCATION (exp);
3855 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3858 dest = CALL_EXPR_ARG (exp, 0);
3859 size = CALL_EXPR_ARG (exp, 1);
3861 /* New argument list transforming bzero(ptr x, int y) to
3862 memset(ptr x, int 0, size_t y). This is done this way
3863 so that if it isn't expanded inline, we fallback to
3864 calling bzero instead of memset. */
3866 return expand_builtin_memset_args (dest, integer_zero_node,
3867 fold_convert_loc (loc, sizetype, size),
3868 const0_rtx, VOIDmode, exp);
3871 /* Expand expression EXP, which is a call to the memcmp built-in function.
3872 Return NULL_RTX if we failed and the
3873 caller should emit a normal call, otherwise try to get the result in
3874 TARGET, if convenient (and in mode MODE, if that's convenient). */
3877 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3878 ATTRIBUTE_UNUSED enum machine_mode mode)
3880 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3882 if (!validate_arglist (exp,
3883 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* Only attempt inline expansion when the target provides a cmpmem or
   cmpstrn pattern.  */
3886 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3888 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3891 tree arg1 = CALL_EXPR_ARG (exp, 0);
3892 tree arg2 = CALL_EXPR_ARG (exp, 1);
3893 tree len = CALL_EXPR_ARG (exp, 2);
3896 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3898 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3899 enum machine_mode insn_mode;
3901 #ifdef HAVE_cmpmemsi
3903 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3906 #ifdef HAVE_cmpstrnsi
3908 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3913 /* If we don't have POINTER_TYPE, call the function. */
3914 if (arg1_align == 0 || arg2_align == 0)
3917 /* Make a place to write the result of the instruction. */
3920 && REG_P (result) && GET_MODE (result) == insn_mode
3921 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3922 result = gen_reg_rtx (insn_mode);
3924 arg1_rtx = get_memory_rtx (arg1, len);
3925 arg2_rtx = get_memory_rtx (arg2, len);
3926 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3928 /* Set MEM_SIZE as appropriate. */
3929 if (CONST_INT_P (arg3_rtx))
3931 set_mem_size (arg1_rtx, arg3_rtx);
3932 set_mem_size (arg2_rtx, arg3_rtx);
3935 #ifdef HAVE_cmpmemsi
3937 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3938 GEN_INT (MIN (arg1_align, arg2_align)));
3941 #ifdef HAVE_cmpstrnsi
3943 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3944 GEN_INT (MIN (arg1_align, arg2_align)));
/* Pattern generation failed: fall back to calling the memcmp libfunc
   directly with the already-expanded operands.  */
3952 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3953 TYPE_MODE (integer_type_node), 3,
3954 XEXP (arg1_rtx, 0), Pmode,
3955 XEXP (arg2_rtx, 0), Pmode,
3956 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3957 TYPE_UNSIGNED (sizetype)),
3958 TYPE_MODE (sizetype));
3960 /* Return the value in the proper mode for this function. */
3961 mode = TYPE_MODE (TREE_TYPE (exp));
3962 if (GET_MODE (result) == mode)
3964 else if (target != 0)
3966 convert_move (target, result, 0);
3970 return convert_to_mode (mode, result, 0);
3977 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3978 if we failed; the caller should emit a normal call, otherwise try to get
3979 the result in TARGET, if convenient. */
3982 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3984 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3987 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3988 if (cmpstr_optab[SImode] != CODE_FOR_nothing
3989 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
3991 rtx arg1_rtx, arg2_rtx;
3992 rtx result, insn = NULL_RTX;
3994 tree arg1 = CALL_EXPR_ARG (exp, 0);
3995 tree arg2 = CALL_EXPR_ARG (exp, 1);
3998 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4000 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4002 /* If we don't have POINTER_TYPE, call the function. */
4003 if (arg1_align == 0 || arg2_align == 0)
4006 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4007 arg1 = builtin_save_expr (arg1);
4008 arg2 = builtin_save_expr (arg2);
4010 arg1_rtx = get_memory_rtx (arg1, NULL);
4011 arg2_rtx = get_memory_rtx (arg2, NULL);
4013 #ifdef HAVE_cmpstrsi
4014 /* Try to call cmpstrsi. */
4017 enum machine_mode insn_mode
4018 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4020 /* Make a place to write the result of the instruction. */
4023 && REG_P (result) && GET_MODE (result) == insn_mode
4024 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4025 result = gen_reg_rtx (insn_mode);
4027 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4028 GEN_INT (MIN (arg1_align, arg2_align)));
4031 #ifdef HAVE_cmpstrnsi
4032 /* Try to determine at least one length and call cmpstrnsi. */
4033 if (!insn && HAVE_cmpstrnsi)
4038 enum machine_mode insn_mode
4039 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4040 tree len1 = c_strlen (arg1, 1);
4041 tree len2 = c_strlen (arg2, 1);
/* Include the NUL terminator in each known length.  */
4044 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4046 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4048 /* If we don't have a constant length for the first, use the length
4049 of the second, if we know it. We don't require a constant for
4050 this case; some cost analysis could be done if both are available
4051 but neither is constant. For now, assume they're equally cheap,
4052 unless one has side effects. If both strings have constant lengths,
4059 else if (TREE_SIDE_EFFECTS (len1))
4061 else if (TREE_SIDE_EFFECTS (len2))
4063 else if (TREE_CODE (len1) != INTEGER_CST)
4065 else if (TREE_CODE (len2) != INTEGER_CST)
4067 else if (tree_int_cst_lt (len1, len2))
4072 /* If both arguments have side effects, we cannot optimize. */
4073 if (!len || TREE_SIDE_EFFECTS (len))
4076 arg3_rtx = expand_normal (len);
4078 /* Make a place to write the result of the instruction. */
4081 && REG_P (result) && GET_MODE (result) == insn_mode
4082 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4083 result = gen_reg_rtx (insn_mode);
4085 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4086 GEN_INT (MIN (arg1_align, arg2_align)));
4092 enum machine_mode mode;
4095 /* Return the value in the proper mode for this function. */
4096 mode = TYPE_MODE (TREE_TYPE (exp));
4097 if (GET_MODE (result) == mode)
4100 return convert_to_mode (mode, result, 0);
4101 convert_move (target, result, 0);
4105 /* Expand the library call ourselves using a stabilized argument
4106 list to avoid re-evaluating the function's arguments twice. */
4107 #ifdef HAVE_cmpstrnsi
4110 fndecl = get_callee_fndecl (exp);
4111 fn = build_call_nofold (fndecl, 2, arg1, arg2);
4112 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4113 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4114 return expand_call (fn, target, target == const0_rtx);
4120 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4121 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4122 the result in TARGET, if convenient. */
4125 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4126 ATTRIBUTE_UNUSED enum machine_mode mode)
4128 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4130 if (!validate_arglist (exp,
4131 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4134 /* If c_strlen can determine an expression for one of the string
4135 lengths, and it doesn't have side effects, then emit cmpstrnsi
4136 using length MIN(strlen(string)+1, arg3). */
4137 #ifdef HAVE_cmpstrnsi
4140 tree len, len1, len2;
4141 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4144 tree arg1 = CALL_EXPR_ARG (exp, 0);
4145 tree arg2 = CALL_EXPR_ARG (exp, 1);
4146 tree arg3 = CALL_EXPR_ARG (exp, 2);
4149 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4151 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4152 enum machine_mode insn_mode
4153 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4155 len1 = c_strlen (arg1, 1);
4156 len2 = c_strlen (arg2, 1);
/* Include the NUL terminator in each known length.  */
4159 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4161 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4163 /* If we don't have a constant length for the first, use the length
4164 of the second, if we know it. We don't require a constant for
4165 this case; some cost analysis could be done if both are available
4166 but neither is constant. For now, assume they're equally cheap,
4167 unless one has side effects. If both strings have constant lengths,
4174 else if (TREE_SIDE_EFFECTS (len1))
4176 else if (TREE_SIDE_EFFECTS (len2))
4178 else if (TREE_CODE (len1) != INTEGER_CST)
4180 else if (TREE_CODE (len2) != INTEGER_CST)
4182 else if (tree_int_cst_lt (len1, len2))
4187 /* If both arguments have side effects, we cannot optimize. */
4188 if (!len || TREE_SIDE_EFFECTS (len))
4191 /* The actual new length parameter is MIN(len,arg3). */
4192 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4193 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4195 /* If we don't have POINTER_TYPE, call the function. */
4196 if (arg1_align == 0 || arg2_align == 0)
4199 /* Make a place to write the result of the instruction. */
4202 && REG_P (result) && GET_MODE (result) == insn_mode
4203 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4204 result = gen_reg_rtx (insn_mode);
4206 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4207 arg1 = builtin_save_expr (arg1);
4208 arg2 = builtin_save_expr (arg2);
4209 len = builtin_save_expr (len);
4211 arg1_rtx = get_memory_rtx (arg1, len);
4212 arg2_rtx = get_memory_rtx (arg2, len);
4213 arg3_rtx = expand_normal (len);
4214 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4215 GEN_INT (MIN (arg1_align, arg2_align)));
4220 /* Return the value in the proper mode for this function. */
4221 mode = TYPE_MODE (TREE_TYPE (exp));
4222 if (GET_MODE (result) == mode)
4225 return convert_to_mode (mode, result, 0);
4226 convert_move (target, result, 0);
4230 /* Expand the library call ourselves using a stabilized argument
4231 list to avoid re-evaluating the function's arguments twice. */
4232 fndecl = get_callee_fndecl (exp);
4233 fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
4234 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4235 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4236 return expand_call (fn, target, target == const0_rtx);
4242 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4243 if that's convenient. */
4246 expand_builtin_saveregs (void)
4250 /* Don't do __builtin_saveregs more than once in a function.
4251 Save the result of the first call and reuse it. */
4252 if (saveregs_value != 0)
4253 return saveregs_value;
4255 /* When this function is called, it means that registers must be
4256 saved on entry to this function. So we migrate the call to the
4257 first insn of this function. */
4261 /* Do whatever the machine needs done in this case. */
4262 val = targetm.calls.expand_builtin_saveregs ();
4267 saveregs_value = val;
4269 /* Put the insns after the NOTE that starts the function. If this
4270 is inside a start_sequence, make the outer-level insn chain current, so
4271 the code is placed at the start of the function. */
4272 push_topmost_sequence ();
4273 emit_insn_after (seq, entry_of_function ());
4274 pop_topmost_sequence ();
4279 /* __builtin_args_info (N) returns word N of the arg space info
4280 for the current function. The number and meanings of words
4281 are controlled by the definition of CUMULATIVE_ARGS. */
4284 expand_builtin_args_info (tree exp)
/* View the cumulative-args record as an array of ints so word N can be
   returned directly.  */
4286 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4287 int *word_ptr = (int *) &crtl->args.info;
4289 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4291 if (call_expr_nargs (exp) != 0)
4293 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4294 error ("argument of %<__builtin_args_info%> must be constant");
4297 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4299 if (wordnum < 0 || wordnum >= nwords)
4300 error ("argument of %<__builtin_args_info%> out of range");
4302 return GEN_INT (word_ptr[wordnum]);
4306 error ("missing argument in %<__builtin_args_info%>");
4311 /* Expand a call to __builtin_next_arg. */
4314 expand_builtin_next_arg (void)
4316 /* Checking arguments is already done in fold_builtin_next_arg
4317 that must be called before this function. */
/* The next anonymous argument lives at the internal arg pointer plus the
   current argument offset.  */
4318 return expand_binop (ptr_mode, add_optab,
4319 crtl->args.internal_arg_pointer,
4320 crtl->args.arg_offset_rtx,
4321 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4324 /* Make it easier for the backends by protecting the valist argument
4325 from multiple evaluations. */
/* VALIST is the va_list expression; NEEDS_LVALUE is nonzero when the
   caller must be able to assign through the result.  */
4328 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4330 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4332 gcc_assert (vatype != NULL_TREE);
4334 if (TREE_CODE (vatype) == ARRAY_TYPE)
4336 if (TREE_SIDE_EFFECTS (valist))
4337 valist = save_expr (valist);
4339 /* For this case, the backends will be expecting a pointer to
4340 vatype, but it's possible we've actually been given an array
4341 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4343 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4345 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4346 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4355 if (! TREE_SIDE_EFFECTS (valist))
4358 pt = build_pointer_type (vatype);
4359 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4360 TREE_SIDE_EFFECTS (valist) = 1;
4363 if (TREE_SIDE_EFFECTS (valist))
4364 valist = save_expr (valist);
4365 valist = build_fold_indirect_ref_loc (loc, valist);
4371 /* The "standard" definition of va_list is void*. */
4374 std_build_builtin_va_list (void)
4376 return ptr_type_node;
4379 /* The "standard" abi va_list is va_list_type_node. */
4382 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4384 return va_list_type_node;
4387 /* The "standard" type of va_list is va_list_type_node. */
4390 std_canonical_va_list_type (tree type)
4394 if (INDIRECT_REF_P (type))
4395 type = TREE_TYPE (type);
4396 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4397 type = TREE_TYPE (type);
4398 wtype = va_list_type_node;
4400 /* Treat structure va_list types. */
4401 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4402 htype = TREE_TYPE (htype);
4403 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4405 /* If va_list is an array type, the argument may have decayed
4406 to a pointer type, e.g. by being passed to another function.
4407 In that case, unwrap both types so that we can compare the
4408 underlying records. */
4409 if (TREE_CODE (htype) == ARRAY_TYPE
4410 || POINTER_TYPE_P (htype))
4412 wtype = TREE_TYPE (wtype);
4413 htype = TREE_TYPE (htype);
4416 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4417 return va_list_type_node;
4422 /* The "standard" implementation of va_start: just assign `nextarg' to
4426 std_expand_builtin_va_start (tree valist, rtx nextarg)
4428 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4429 convert_move (va_r, nextarg, 0);
4432 /* Expand EXP, a call to __builtin_va_start. */
4435 expand_builtin_va_start (tree exp)
4439 location_t loc = EXPR_LOCATION (exp);
4441 if (call_expr_nargs (exp) < 2)
4443 error_at (loc, "too few arguments to function %<va_start%>");
4447 if (fold_builtin_next_arg (exp, true))
4450 nextarg = expand_builtin_next_arg ();
4451 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4453 if (targetm.expand_builtin_va_start)
4454 targetm.expand_builtin_va_start (valist, nextarg);
4456 std_expand_builtin_va_start (valist, nextarg);
4461 /* The "standard" implementation of va_arg: read the value from the
4462 current (padded) address and increment by the (padded) size. */
4465 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4468 tree addr, t, type_size, rounded_size, valist_tmp;
4469 unsigned HOST_WIDE_INT align, boundary;
4472 #ifdef ARGS_GROW_DOWNWARD
4473 /* All of the alignment and movement below is for args-grow-up machines.
4474 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4475 implement their own specialized gimplify_va_arg_expr routines. */
4479 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4481 type = build_pointer_type (type);
4483 align = PARM_BOUNDARY / BITS_PER_UNIT;
4484 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4486 /* When we align parameter on stack for caller, if the parameter
4487 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4488 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4489 here with caller. */
4490 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4491 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4493 boundary /= BITS_PER_UNIT;
4495 /* Hoist the valist value into a temporary for the moment. */
4496 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4498 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4499 requires greater alignment, we must perform dynamic alignment. */
4500 if (boundary > align
4501 && !integer_zerop (TYPE_SIZE (type)))
4503 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4504 fold_build2 (POINTER_PLUS_EXPR,
4506 valist_tmp, size_int (boundary - 1)));
4507 gimplify_and_add (t, pre_p);
4509 t = fold_convert (sizetype, valist_tmp);
4510 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4511 fold_convert (TREE_TYPE (valist),
4512 fold_build2 (BIT_AND_EXPR, sizetype, t,
4513 size_int (-boundary))));
4514 gimplify_and_add (t, pre_p);
4519 /* If the actual alignment is less than the alignment of the type,
4520 adjust the type accordingly so that we don't assume strict alignment
4521 when dereferencing the pointer. */
4522 boundary *= BITS_PER_UNIT;
4523 if (boundary < TYPE_ALIGN (type))
4525 type = build_variant_type_copy (type);
4526 TYPE_ALIGN (type) = boundary;
4529 /* Compute the rounded size of the type. */
4530 type_size = size_in_bytes (type);
4531 rounded_size = round_up (type_size, align);
4533 /* Reduce rounded_size so it's sharable with the postqueue. */
4534 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4538 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4540 /* Small args are padded downward. */
4541 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4542 rounded_size, size_int (align));
4543 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4544 size_binop (MINUS_EXPR, rounded_size, type_size));
4545 addr = fold_build2 (POINTER_PLUS_EXPR,
4546 TREE_TYPE (addr), addr, t);
4549 /* Compute new value for AP. */
4550 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4551 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4552 gimplify_and_add (t, pre_p);
4554 addr = fold_convert (build_pointer_type (type), addr);
4557 addr = build_va_arg_indirect_ref (addr);
4559 return build_va_arg_indirect_ref (addr);
4562 /* Build an indirect-ref expression over the given TREE, which represents a
4563 piece of a va_arg() expansion. */
4565 build_va_arg_indirect_ref (tree addr)
4567 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4569 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4575 /* Return a dummy expression of type TYPE in order to keep going after an
4579 dummy_object (tree type)
4581 tree t = build_int_cst (build_pointer_type (type), 0);
4582 return build1 (INDIRECT_REF, type, t);
4585 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4586 builtin function, but a very special sort of operator. */
4588 enum gimplify_status
4589 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4591 tree promoted_type, have_va_type;
4592 tree valist = TREE_OPERAND (*expr_p, 0);
4593 tree type = TREE_TYPE (*expr_p);
4595 location_t loc = EXPR_LOCATION (*expr_p);
4597 /* Verify that valist is of the proper type. */
4598 have_va_type = TREE_TYPE (valist);
4599 if (have_va_type == error_mark_node)
4601 have_va_type = targetm.canonical_va_list_type (have_va_type);
4603 if (have_va_type == NULL_TREE)
4605 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4609 /* Generate a diagnostic for requesting data of a type that cannot
4610 be passed through `...' due to type promotion at the call site. */
4611 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4614 static bool gave_help;
4617 /* Unfortunately, this is merely undefined, rather than a constraint
4618 violation, so we cannot make this an error. If this call is never
4619 executed, the program is still strictly conforming. */
4620 warned = warning_at (loc, 0,
4621 "%qT is promoted to %qT when passed through %<...%>",
4622 type, promoted_type);
4623 if (!gave_help && warned)
4626 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4627 promoted_type, type);
4630 /* We can, however, treat "undefined" any way we please.
4631 Call abort to encourage the user to fix the program. */
4633 inform (loc, "if this code is reached, the program will abort");
4634 /* Before the abort, allow the evaluation of the va_list
4635 expression to exit or longjmp. */
4636 gimplify_and_add (valist, pre_p);
4637 t = build_call_expr_loc (loc,
4638 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4639 gimplify_and_add (t, pre_p);
4641 /* This is dead code, but go ahead and finish so that the
4642 mode of the result comes out right. */
4643 *expr_p = dummy_object (type);
4648 /* Make it easier for the backends by protecting the valist argument
4649 from multiple evaluations. */
4650 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4652 /* For this case, the backends will be expecting a pointer to
4653 TREE_TYPE (abi), but it's possible we've
4654 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4656 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4658 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4659 valist = fold_convert_loc (loc, p1,
4660 build_fold_addr_expr_loc (loc, valist));
4663 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4666 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4668 if (!targetm.gimplify_va_arg_expr)
4669 /* FIXME: Once most targets are converted we should merely
4670 assert this is non-null. */
4673 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4678 /* Expand EXP, a call to __builtin_va_end. */
4681 expand_builtin_va_end (tree exp)
4683 tree valist = CALL_EXPR_ARG (exp, 0);
4685 /* Evaluate for side effects, if needed. I hate macros that don't
4687 if (TREE_SIDE_EFFECTS (valist))
4688 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4693 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4694 builtin rather than just as an assignment in stdarg.h because of the
4695 nastiness of array-type va_list types. */
4698 expand_builtin_va_copy (tree exp)
4701 location_t loc = EXPR_LOCATION (exp);
4703 dst = CALL_EXPR_ARG (exp, 0);
4704 src = CALL_EXPR_ARG (exp, 1);
4706 dst = stabilize_va_list_loc (loc, dst, 1);
4707 src = stabilize_va_list_loc (loc, src, 0);
4709 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4711 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4713 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4714 TREE_SIDE_EFFECTS (t) = 1;
4715 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4719 rtx dstb, srcb, size;
4721 /* Evaluate to pointers. */
4722 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4723 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4724 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4725 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4727 dstb = convert_memory_address (Pmode, dstb);
4728 srcb = convert_memory_address (Pmode, srcb);
4730 /* "Dereference" to BLKmode memories. */
4731 dstb = gen_rtx_MEM (BLKmode, dstb);
4732 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4733 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4734 srcb = gen_rtx_MEM (BLKmode, srcb);
4735 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4736 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4739 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4745 /* Expand a call to one of the builtin functions __builtin_frame_address or
4746 __builtin_return_address. */
4749 expand_builtin_frame_address (tree fndecl, tree exp)
4751 /* The argument must be a nonnegative integer constant.
4752 It counts the number of frames to scan up the stack.
4753 The value is the return address saved in that frame. */
4754 if (call_expr_nargs (exp) == 0)
4755 /* Warning about missing arg was already issued. */
4757 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4759 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4760 error ("invalid argument to %<__builtin_frame_address%>");
4762 error ("invalid argument to %<__builtin_return_address%>");
4768 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4769 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4771 /* Some ports cannot access arbitrary stack frames. */
4774 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4775 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4777 warning (0, "unsupported argument to %<__builtin_return_address%>");
4781 /* For __builtin_frame_address, return what we've got. */
4782 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4786 && ! CONSTANT_P (tem))
4787 tem = copy_to_mode_reg (Pmode, tem);
4792 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4793 we failed and the caller should emit a normal call, otherwise try to get
4794 the result in TARGET, if convenient. */
4797 expand_builtin_alloca (tree exp, rtx target)
4802 /* Emit normal call if marked not-inlineable. */
4803 if (CALL_CANNOT_INLINE_P (exp))
4806 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4809 /* Compute the argument. */
4810 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4812 /* Allocate the desired space. */
4813 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4814 result = convert_memory_address (ptr_mode, result);
4819 /* Expand a call to a bswap builtin with argument ARG0. MODE
4820 is the mode to expand with. */
4823 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4825 enum machine_mode mode;
4829 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4832 arg = CALL_EXPR_ARG (exp, 0);
4833 mode = TYPE_MODE (TREE_TYPE (arg));
4834 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4836 target = expand_unop (mode, bswap_optab, op0, target, 1);
4838 gcc_assert (target);
4840 return convert_to_mode (mode, target, 0);
4843 /* Expand a call to a unary builtin in EXP.
4844 Return NULL_RTX if a normal call should be emitted rather than expanding the
4845 function in-line. If convenient, the result should be placed in TARGET.
4846 SUBTARGET may be used as the target for computing one of EXP's operands. */
4849 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4850 rtx subtarget, optab op_optab)
4854 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4857 /* Compute the argument. */
4858 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4859 VOIDmode, EXPAND_NORMAL);
4860 /* Compute op, into TARGET if possible.
4861 Set TARGET to wherever the result comes back. */
4862 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4863 op_optab, op0, target, 1);
4864 gcc_assert (target);
4866 return convert_to_mode (target_mode, target, 0);
4869 /* Expand a call to __builtin_expect. We just return our argument
4870 as the builtin_expect semantic should've been already executed by
4871 tree branch prediction pass. */
4874 expand_builtin_expect (tree exp, rtx target)
4878 if (call_expr_nargs (exp) < 2)
4880 arg = CALL_EXPR_ARG (exp, 0);
4882 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4883 /* When guessing was done, the hints should be already stripped away. */
4884 gcc_assert (!flag_guess_branch_prob
4885 || optimize == 0 || errorcount || sorrycount);
4890 expand_builtin_trap (void)
4894 emit_insn (gen_trap ());
4897 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4912 /* Expand EXP, a call to fabs, fabsf or fabsl.
4913 Return NULL_RTX if a normal call should be emitted rather than expanding
4914 the function inline. If convenient, the result should be placed
4915 in TARGET. SUBTARGET may be used as the target for computing
4919 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4921 enum machine_mode mode;
4925 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4928 arg = CALL_EXPR_ARG (exp, 0);
4929 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4930 mode = TYPE_MODE (TREE_TYPE (arg));
4931 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4932 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4935 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4936 Return NULL is a normal call should be emitted rather than expanding the
4937 function inline. If convenient, the result should be placed in TARGET.
4938 SUBTARGET may be used as the target for computing the operand. */
4941 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4946 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4949 arg = CALL_EXPR_ARG (exp, 0);
4950 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4952 arg = CALL_EXPR_ARG (exp, 1);
4953 op1 = expand_normal (arg);
4955 return expand_copysign (op0, op1, target);
4958 /* Create a new constant string literal and return a char* pointer to it.
4959 The STRING_CST value is the LEN characters at STR. */
4961 build_string_literal (int len, const char *str)
4963 tree t, elem, index, type;
4965 t = build_string (len, str);
4966 elem = build_type_variant (char_type_node, 1, 0);
4967 index = build_index_type (size_int (len - 1));
4968 type = build_array_type (elem, index);
4969 TREE_TYPE (t) = type;
4970 TREE_CONSTANT (t) = 1;
4971 TREE_READONLY (t) = 1;
4972 TREE_STATIC (t) = 1;
4974 type = build_pointer_type (elem);
4975 t = build1 (ADDR_EXPR, type,
4976 build4 (ARRAY_REF, elem,
4977 t, integer_zero_node, NULL_TREE, NULL_TREE));
4981 /* Expand a call to either the entry or exit function profiler. */
4984 expand_builtin_profile_func (bool exitp)
4986 rtx this_rtx, which;
4988 this_rtx = DECL_RTL (current_function_decl);
4989 gcc_assert (MEM_P (this_rtx));
4990 this_rtx = XEXP (this_rtx, 0);
4993 which = profile_function_exit_libfunc;
4995 which = profile_function_entry_libfunc;
4997 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
4998 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5005 /* Expand a call to __builtin___clear_cache. */
5008 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5010 #ifndef HAVE_clear_cache
5011 #ifdef CLEAR_INSN_CACHE
5012 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5013 does something. Just do the default expansion to a call to
5017 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5018 does nothing. There is no need to call it. Do nothing. */
5020 #endif /* CLEAR_INSN_CACHE */
5022 /* We have a "clear_cache" insn, and it will handle everything. */
5024 rtx begin_rtx, end_rtx;
5025 enum insn_code icode;
5027 /* We must not expand to a library call. If we did, any
5028 fallback library function in libgcc that might contain a call to
5029 __builtin___clear_cache() would recurse infinitely. */
5030 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5032 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5036 if (HAVE_clear_cache)
5038 icode = CODE_FOR_clear_cache;
5040 begin = CALL_EXPR_ARG (exp, 0);
5041 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5042 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5043 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5044 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5046 end = CALL_EXPR_ARG (exp, 1);
5047 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5048 end_rtx = convert_memory_address (Pmode, end_rtx);
5049 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5050 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5052 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5055 #endif /* HAVE_clear_cache */
5058 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5061 round_trampoline_addr (rtx tramp)
5063 rtx temp, addend, mask;
5065 /* If we don't need too much alignment, we'll have been guaranteed
5066 proper alignment by get_trampoline_type. */
5067 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5070 /* Round address up to desired boundary. */
5071 temp = gen_reg_rtx (Pmode);
5072 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5073 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5075 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5076 temp, 0, OPTAB_LIB_WIDEN);
5077 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5078 temp, 0, OPTAB_LIB_WIDEN);
5084 expand_builtin_init_trampoline (tree exp)
5086 tree t_tramp, t_func, t_chain;
5087 rtx m_tramp, r_tramp, r_chain, tmp;
5089 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5090 POINTER_TYPE, VOID_TYPE))
5093 t_tramp = CALL_EXPR_ARG (exp, 0);
5094 t_func = CALL_EXPR_ARG (exp, 1);
5095 t_chain = CALL_EXPR_ARG (exp, 2);
5097 r_tramp = expand_normal (t_tramp);
5098 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5099 MEM_NOTRAP_P (m_tramp) = 1;
5101 /* The TRAMP argument should be the address of a field within the
5102 local function's FRAME decl. Let's see if we can fill in the
5103 to fill in the MEM_ATTRs for this memory. */
5104 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5105 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
5108 tmp = round_trampoline_addr (r_tramp);
5111 m_tramp = change_address (m_tramp, BLKmode, tmp);
5112 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5113 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5116 /* The FUNC argument should be the address of the nested function.
5117 Extract the actual function decl to pass to the hook. */
5118 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5119 t_func = TREE_OPERAND (t_func, 0);
5120 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5122 r_chain = expand_normal (t_chain);
5124 /* Generate insns to initialize the trampoline. */
5125 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5127 trampolines_created = 1;
5132 expand_builtin_adjust_trampoline (tree exp)
5136 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5139 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5140 tramp = round_trampoline_addr (tramp);
5141 if (targetm.calls.trampoline_adjust_address)
5142 tramp = targetm.calls.trampoline_adjust_address (tramp);
5147 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5148 function. The function first checks whether the back end provides
5149 an insn to implement signbit for the respective mode. If not, it
5150 checks whether the floating point format of the value is such that
5151 the sign bit can be extracted. If that is not the case, the
5152 function returns NULL_RTX to indicate that a normal call should be
5153 emitted rather than expanding the function in-line. EXP is the
5154 expression that is a call to the builtin function; if convenient,
5155 the result should be placed in TARGET. */
5157 expand_builtin_signbit (tree exp, rtx target)
5159 const struct real_format *fmt;
5160 enum machine_mode fmode, imode, rmode;
5161 HOST_WIDE_INT hi, lo;
5164 enum insn_code icode;
5166 location_t loc = EXPR_LOCATION (exp);
5168 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5171 arg = CALL_EXPR_ARG (exp, 0);
5172 fmode = TYPE_MODE (TREE_TYPE (arg));
5173 rmode = TYPE_MODE (TREE_TYPE (exp));
5174 fmt = REAL_MODE_FORMAT (fmode);
5176 arg = builtin_save_expr (arg);
5178 /* Expand the argument yielding a RTX expression. */
5179 temp = expand_normal (arg);
5181 /* Check if the back end provides an insn that handles signbit for the
5183 icode = signbit_optab->handlers [(int) fmode].insn_code;
5184 if (icode != CODE_FOR_nothing)
5186 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5187 emit_unop_insn (icode, target, temp, UNKNOWN);
5191 /* For floating point formats without a sign bit, implement signbit
5193 bitpos = fmt->signbit_ro;
5196 /* But we can't do this if the format supports signed zero. */
5197 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5200 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5201 build_real (TREE_TYPE (arg), dconst0));
5202 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5205 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5207 imode = int_mode_for_mode (fmode);
5208 if (imode == BLKmode)
5210 temp = gen_lowpart (imode, temp);
5215 /* Handle targets with different FP word orders. */
5216 if (FLOAT_WORDS_BIG_ENDIAN)
5217 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5219 word = bitpos / BITS_PER_WORD;
5220 temp = operand_subword_force (temp, word, fmode);
5221 bitpos = bitpos % BITS_PER_WORD;
5224 /* Force the intermediate word_mode (or narrower) result into a
5225 register. This avoids attempting to create paradoxical SUBREGs
5226 of floating point modes below. */
5227 temp = force_reg (imode, temp);
5229 /* If the bitpos is within the "result mode" lowpart, the operation
5230 can be implement with a single bitwise AND. Otherwise, we need
5231 a right shift and an AND. */
5233 if (bitpos < GET_MODE_BITSIZE (rmode))
5235 if (bitpos < HOST_BITS_PER_WIDE_INT)
5238 lo = (HOST_WIDE_INT) 1 << bitpos;
5242 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5246 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5247 temp = gen_lowpart (rmode, temp);
5248 temp = expand_binop (rmode, and_optab, temp,
5249 immed_double_const (lo, hi, rmode),
5250 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5254 /* Perform a logical right shift to place the signbit in the least
5255 significant bit, then truncate the result to the desired mode
5256 and mask just this bit. */
5257 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5258 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5259 temp = gen_lowpart (rmode, temp);
5260 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5261 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5267 /* Expand fork or exec calls. TARGET is the desired target of the
5268 call. EXP is the call. FN is the
5269 identificator of the actual function. IGNORE is nonzero if the
5270 value is to be ignored. */
5273 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5278 /* If we are not profiling, just call the function. */
5279 if (!profile_arc_flag)
5282 /* Otherwise call the wrapper. This should be equivalent for the rest of
5283 compiler, so the code does not diverge, and the wrapper may run the
5284 code necessary for keeping the profiling sane. */
5286 switch (DECL_FUNCTION_CODE (fn))
5289 id = get_identifier ("__gcov_fork");
5292 case BUILT_IN_EXECL:
5293 id = get_identifier ("__gcov_execl");
5296 case BUILT_IN_EXECV:
5297 id = get_identifier ("__gcov_execv");
5300 case BUILT_IN_EXECLP:
5301 id = get_identifier ("__gcov_execlp");
5304 case BUILT_IN_EXECLE:
5305 id = get_identifier ("__gcov_execle");
5308 case BUILT_IN_EXECVP:
5309 id = get_identifier ("__gcov_execvp");
5312 case BUILT_IN_EXECVE:
5313 id = get_identifier ("__gcov_execve");
5320 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5321 FUNCTION_DECL, id, TREE_TYPE (fn));
5322 DECL_EXTERNAL (decl) = 1;
5323 TREE_PUBLIC (decl) = 1;
5324 DECL_ARTIFICIAL (decl) = 1;
5325 TREE_NOTHROW (decl) = 1;
5326 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5327 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5328 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5329 return expand_call (call, target, ignore);
5334 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5335 the pointer in these functions is void*, the tree optimizers may remove
5336 casts. The mode computed in expand_builtin isn't reliable either, due
5337 to __sync_bool_compare_and_swap.
5339 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5340 group of builtins. This gives us log2 of the mode size. */
5342 static inline enum machine_mode
5343 get_builtin_sync_mode (int fcode_diff)
5345 /* The size is not negotiable, so ask not to get BLKmode in return
5346 if the target indicates that a smaller size would be better. */
5347 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5350 /* Expand the memory expression LOC and return the appropriate memory operand
5351 for the builtin_sync operations. */
5354 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5358 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5359 addr = convert_memory_address (Pmode, addr);
5361 /* Note that we explicitly do not want any alias information for this
5362 memory, so that we kill all other live memories. Otherwise we don't
5363 satisfy the full barrier semantics of the intrinsic. */
5364 mem = validize_mem (gen_rtx_MEM (mode, addr));
5366 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5367 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5368 MEM_VOLATILE_P (mem) = 1;
5373 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5374 EXP is the CALL_EXPR. CODE is the rtx code
5375 that corresponds to the arithmetic or logical operation from the name;
5376 an exception here is that NOT actually means NAND. TARGET is an optional
5377 place for us to store the results; AFTER is true if this is the
5378 fetch_and_xxx form. IGNORE is true if we don't actually care about
5379 the result of the operation at all. */
5382 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5383 enum rtx_code code, bool after,
5384 rtx target, bool ignore)
5387 enum machine_mode old_mode;
5388 location_t loc = EXPR_LOCATION (exp);
5390 if (code == NOT && warn_sync_nand)
5392 tree fndecl = get_callee_fndecl (exp);
5393 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5395 static bool warned_f_a_n, warned_n_a_f;
5399 case BUILT_IN_FETCH_AND_NAND_1:
5400 case BUILT_IN_FETCH_AND_NAND_2:
5401 case BUILT_IN_FETCH_AND_NAND_4:
5402 case BUILT_IN_FETCH_AND_NAND_8:
5403 case BUILT_IN_FETCH_AND_NAND_16:
5408 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5409 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5410 warned_f_a_n = true;
5413 case BUILT_IN_NAND_AND_FETCH_1:
5414 case BUILT_IN_NAND_AND_FETCH_2:
5415 case BUILT_IN_NAND_AND_FETCH_4:
5416 case BUILT_IN_NAND_AND_FETCH_8:
5417 case BUILT_IN_NAND_AND_FETCH_16:
5422 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5423 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5424 warned_n_a_f = true;
5432 /* Expand the operands. */
5433 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5435 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5436 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5437 of CONST_INTs, where we know the old_mode only from the call argument. */
5438 old_mode = GET_MODE (val);
5439 if (old_mode == VOIDmode)
5440 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5441 val = convert_modes (mode, old_mode, val, 1);
5444 return expand_sync_operation (mem, val, code);
5446 return expand_sync_fetch_operation (mem, val, code, after, target);
5449 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5450 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5451 true if this is the boolean form. TARGET is a place for us to store the
5452 results; this is NOT optional if IS_BOOL is true. */
5455 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5456 bool is_bool, rtx target)
5458 rtx old_val, new_val, mem;
5459 enum machine_mode old_mode;
5461 /* Expand the operands. */
5462 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5465 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5466 mode, EXPAND_NORMAL);
5467 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5468 of CONST_INTs, where we know the old_mode only from the call argument. */
5469 old_mode = GET_MODE (old_val);
5470 if (old_mode == VOIDmode)
5471 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5472 old_val = convert_modes (mode, old_mode, old_val, 1);
5474 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5475 mode, EXPAND_NORMAL);
5476 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5477 of CONST_INTs, where we know the old_mode only from the call argument.
5478 Note that OLD_MODE is reused here as scratch for NEW_VAL's incoming
5479 mode; it no longer describes OLD_VAL past this point. */
5478 old_mode = GET_MODE (new_val);
5479 if (old_mode == VOIDmode)
5480 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5481 new_val = convert_modes (mode, old_mode, new_val, 1);
5483 /* Dispatch to the boolean or value flavor of the CAS expander.  */
5484 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5486 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5489 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5490 general form is actually an atomic exchange, and some targets only
5491 support a reduced form with the second argument being a constant 1.
5492 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5493 the result.  MODE is the machine mode selected by the builtin's
5494 _N width suffix. */
5496 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5500 enum machine_mode old_mode;
5502 /* Expand the operands. */
5503 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5504 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5505 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5506 of CONST_INTs, where we know the old_mode only from the call argument. */
5507 old_mode = GET_MODE (val);
5508 if (old_mode == VOIDmode)
5509 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5510 val = convert_modes (mode, old_mode, val, 1);
5512 return expand_sync_lock_test_and_set (mem, val, target);
5515 /* Expand the __sync_synchronize intrinsic.  Emits a full memory
5516 barrier, trying progressively weaker mechanisms:  a target
5517 memory_barrier insn, then a library call, then a volatile asm
5518 with a "memory" clobber. */
5518 expand_builtin_synchronize (void)
5521 VEC (tree, gc) *v_clobbers;
5522 /* Prefer an explicit memory_barrier insn when the md file provides
5523 one and it is enabled for the current target. */
5523 #ifdef HAVE_memory_barrier
5524 if (HAVE_memory_barrier)
5526 emit_insn (gen_memory_barrier ());
5530 /* Next choice: a __sync_synchronize library routine, if the target
5531 registered one. */
5531 if (synchronize_libfunc != NULL_RTX)
5533 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5537 /* If no explicit memory barrier instruction is available, create an
5538 empty asm stmt with a memory clobber.  Marking it volatile keeps
5539 the optimizers from deleting or moving it. */
5539 v_clobbers = VEC_alloc (tree, gc, 1);
5540 VEC_quick_push (tree, v_clobbers,
5541 tree_cons (NULL, build_string (6, "memory"), NULL));
5542 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5543 gimple_asm_set_volatile (x, true);
5544 expand_asm_stmt (x);
5547 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR.
5548 MODE is the machine mode selected by the builtin's _N width
5549 suffix.  Stores zero to the memory operand with release
5550 semantics. */
5550 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5552 enum insn_code icode;
5554 rtx val = const0_rtx;
5556 /* Expand the operands. */
5557 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5559 /* If there is an explicit operation in the md file, use it. */
5560 icode = sync_lock_release[mode];
5561 if (icode != CODE_FOR_nothing)
5562 /* The insn may not accept const0_rtx directly; copy the zero
5563 into a register if the predicate rejects it. */
5563 if (!insn_data[icode].operand[1].predicate (val, mode))
5564 val = force_reg (mode, val);
5566 insn = GEN_FCN (icode) (mem, val);
5574 /* Otherwise we can implement this operation by emitting a barrier
5575 followed by a store of zero. */
5576 expand_builtin_synchronize ();
5577 emit_move_insn (mem, val);
5580 /* Expand an expression EXP that calls a built-in function,
5581 with result going to TARGET if that's convenient
5582 (and in mode MODE if that's convenient).
5583 SUBTARGET may be used as the target for computing one of EXP's operands.
5584 IGNORE is nonzero if the value is to be ignored.
5585 This is the central RTL-expansion dispatcher for builtins: each
5586 case either expands inline (returning/setting TARGET) or falls
5587 through to an ordinary library call at the bottom. */
5587 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5590 tree fndecl = get_callee_fndecl (exp);
5591 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5592 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5593 /* Machine-specific builtins are handed entirely to the backend. */
5594 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5595 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5597 /* When not optimizing, generate calls to library functions for a certain
5598 set of builtins (alloca and free must still be expanded inline). */
5600 && !called_as_built_in (fndecl)
5601 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5602 && fcode != BUILT_IN_ALLOCA
5603 && fcode != BUILT_IN_FREE)
5604 return expand_call (exp, target, ignore);
5606 /* The built-in function expanders test for target == const0_rtx
5607 to determine whether the function's result will be ignored. */
5609 target = const0_rtx;
5611 /* If the result of a pure or const built-in function is ignored, and
5612 none of its arguments are volatile, we can avoid expanding the
5613 built-in call and just evaluate the arguments for side-effects. */
5614 if (target == const0_rtx
5615 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5617 bool volatilep = false;
5619 call_expr_arg_iterator iter;
5621 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5622 if (TREE_THIS_VOLATILE (arg))
5629 /* No volatile arguments: evaluate each argument only for effect. */
5630 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5631 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL)
5638 CASE_FLT_FN (BUILT_IN_FABS):
5639 target = expand_builtin_fabs (exp, target, subtarget);
5644 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5645 target = expand_builtin_copysign (exp, target, subtarget);
5650 /* Just do a normal library call if we were unable to fold
5651 the values. */
5652 CASE_FLT_FN (BUILT_IN_CABS):
5655 CASE_FLT_FN (BUILT_IN_EXP):
5656 CASE_FLT_FN (BUILT_IN_EXP10):
5657 CASE_FLT_FN (BUILT_IN_POW10):
5658 CASE_FLT_FN (BUILT_IN_EXP2):
5659 CASE_FLT_FN (BUILT_IN_EXPM1):
5660 CASE_FLT_FN (BUILT_IN_LOGB):
5661 CASE_FLT_FN (BUILT_IN_LOG):
5662 CASE_FLT_FN (BUILT_IN_LOG10):
5663 CASE_FLT_FN (BUILT_IN_LOG2):
5664 CASE_FLT_FN (BUILT_IN_LOG1P):
5665 CASE_FLT_FN (BUILT_IN_TAN):
5666 CASE_FLT_FN (BUILT_IN_ASIN):
5667 CASE_FLT_FN (BUILT_IN_ACOS):
5668 CASE_FLT_FN (BUILT_IN_ATAN):
5669 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5670 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5671 because of possible accuracy problems. */
5672 if (! flag_unsafe_math_optimizations)
5674 CASE_FLT_FN (BUILT_IN_SQRT):
5675 CASE_FLT_FN (BUILT_IN_FLOOR):
5676 CASE_FLT_FN (BUILT_IN_CEIL):
5677 CASE_FLT_FN (BUILT_IN_TRUNC):
5678 CASE_FLT_FN (BUILT_IN_ROUND):
5679 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5680 CASE_FLT_FN (BUILT_IN_RINT):
5681 target = expand_builtin_mathfn (exp, target, subtarget);
5686 CASE_FLT_FN (BUILT_IN_ILOGB):
5687 if (! flag_unsafe_math_optimizations)
5689 CASE_FLT_FN (BUILT_IN_ISINF):
5690 CASE_FLT_FN (BUILT_IN_FINITE):
5691 case BUILT_IN_ISFINITE:
5692 case BUILT_IN_ISNORMAL:
5693 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5698 CASE_FLT_FN (BUILT_IN_LCEIL):
5699 CASE_FLT_FN (BUILT_IN_LLCEIL):
5700 CASE_FLT_FN (BUILT_IN_LFLOOR):
5701 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5702 target = expand_builtin_int_roundingfn (exp, target);
5707 CASE_FLT_FN (BUILT_IN_LRINT):
5708 CASE_FLT_FN (BUILT_IN_LLRINT):
5709 CASE_FLT_FN (BUILT_IN_LROUND):
5710 CASE_FLT_FN (BUILT_IN_LLROUND):
5711 target = expand_builtin_int_roundingfn_2 (exp, target);
5716 CASE_FLT_FN (BUILT_IN_POW):
5717 target = expand_builtin_pow (exp, target, subtarget);
5722 CASE_FLT_FN (BUILT_IN_POWI):
5723 target = expand_builtin_powi (exp, target, subtarget);
5728 CASE_FLT_FN (BUILT_IN_ATAN2):
5729 CASE_FLT_FN (BUILT_IN_LDEXP):
5730 CASE_FLT_FN (BUILT_IN_SCALB):
5731 CASE_FLT_FN (BUILT_IN_SCALBN):
5732 CASE_FLT_FN (BUILT_IN_SCALBLN):
5733 if (! flag_unsafe_math_optimizations)
5736 CASE_FLT_FN (BUILT_IN_FMOD):
5737 CASE_FLT_FN (BUILT_IN_REMAINDER):
5738 CASE_FLT_FN (BUILT_IN_DREM):
5739 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5744 CASE_FLT_FN (BUILT_IN_CEXPI):
5745 target = expand_builtin_cexpi (exp, target, subtarget);
5746 gcc_assert (target);
5749 CASE_FLT_FN (BUILT_IN_SIN):
5750 CASE_FLT_FN (BUILT_IN_COS):
5751 if (! flag_unsafe_math_optimizations)
5753 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5758 CASE_FLT_FN (BUILT_IN_SINCOS):
5759 if (! flag_unsafe_math_optimizations)
5761 target = expand_builtin_sincos (exp);
5766 case BUILT_IN_APPLY_ARGS:
5767 return expand_builtin_apply_args ();
5769 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5770 FUNCTION with a copy of the parameters described by
5771 ARGUMENTS, and ARGSIZE. It returns a block of memory
5772 allocated on the stack into which is stored all the registers
5773 that might possibly be used for returning the result of a
5774 function. ARGUMENTS is the value returned by
5775 __builtin_apply_args. ARGSIZE is the number of bytes of
5776 arguments that must be copied. ??? How should this value be
5777 computed? We'll also need a safe worst case value for varargs
5778 functions. */
5779 case BUILT_IN_APPLY:
5780 if (!validate_arglist (exp, POINTER_TYPE,
5781 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5782 && !validate_arglist (exp, REFERENCE_TYPE,
5783 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5789 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5790 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5791 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5793 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5796 /* __builtin_return (RESULT) causes the function to return the
5797 value described by RESULT. RESULT is address of the block of
5798 memory returned by __builtin_apply. */
5799 case BUILT_IN_RETURN:
5800 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5801 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5804 case BUILT_IN_SAVEREGS:
5805 return expand_builtin_saveregs ();
5807 case BUILT_IN_ARGS_INFO:
5808 return expand_builtin_args_info (exp);
5810 case BUILT_IN_VA_ARG_PACK:
5811 /* All valid uses of __builtin_va_arg_pack () are removed during
5812 inlining; reaching here means the use was invalid. */
5813 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5816 case BUILT_IN_VA_ARG_PACK_LEN:
5817 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5818 inlining; reaching here means the use was invalid. */
5819 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5822 /* Return the address of the first anonymous stack arg. */
5823 case BUILT_IN_NEXT_ARG:
5824 if (fold_builtin_next_arg (exp, false))
5826 return expand_builtin_next_arg ();
5828 case BUILT_IN_CLEAR_CACHE:
5829 target = expand_builtin___clear_cache (exp);
5834 case BUILT_IN_CLASSIFY_TYPE:
5835 return expand_builtin_classify_type (exp);
5837 case BUILT_IN_CONSTANT_P:
5840 case BUILT_IN_FRAME_ADDRESS:
5841 case BUILT_IN_RETURN_ADDRESS:
5842 return expand_builtin_frame_address (fndecl, exp);
5844 /* Returns the address of the area where the structure is returned.
5845 0 otherwise. */
5846 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5847 if (call_expr_nargs (exp) != 0
5848 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5849 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5852 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5854 case BUILT_IN_ALLOCA:
5855 target = expand_builtin_alloca (exp, target);
5860 case BUILT_IN_STACK_SAVE:
5861 return expand_stack_save ();
5863 case BUILT_IN_STACK_RESTORE:
5864 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5867 case BUILT_IN_BSWAP32:
5868 case BUILT_IN_BSWAP64:
5869 target = expand_builtin_bswap (exp, target, subtarget);
5874 /* Bit-twiddling builtins expand through the corresponding optab. */
5875 CASE_INT_FN (BUILT_IN_FFS):
5876 case BUILT_IN_FFSIMAX:
5877 target = expand_builtin_unop (target_mode, exp, target,
5878 subtarget, ffs_optab);
5883 CASE_INT_FN (BUILT_IN_CLZ):
5884 case BUILT_IN_CLZIMAX:
5885 target = expand_builtin_unop (target_mode, exp, target,
5886 subtarget, clz_optab);
5891 CASE_INT_FN (BUILT_IN_CTZ):
5892 case BUILT_IN_CTZIMAX:
5893 target = expand_builtin_unop (target_mode, exp, target,
5894 subtarget, ctz_optab);
5899 CASE_INT_FN (BUILT_IN_POPCOUNT):
5900 case BUILT_IN_POPCOUNTIMAX:
5901 target = expand_builtin_unop (target_mode, exp, target,
5902 subtarget, popcount_optab);
5907 CASE_INT_FN (BUILT_IN_PARITY):
5908 case BUILT_IN_PARITYIMAX:
5909 target = expand_builtin_unop (target_mode, exp, target,
5910 subtarget, parity_optab);
5914 /* String and memory builtins with dedicated expanders. */
5915 case BUILT_IN_STRLEN:
5916 target = expand_builtin_strlen (exp, target, target_mode);
5921 case BUILT_IN_STRCPY:
5922 target = expand_builtin_strcpy (exp, target);
5927 case BUILT_IN_STRNCPY:
5928 target = expand_builtin_strncpy (exp, target);
5933 case BUILT_IN_STPCPY:
5934 target = expand_builtin_stpcpy (exp, target, mode);
5939 case BUILT_IN_MEMCPY:
5940 target = expand_builtin_memcpy (exp, target);
5945 case BUILT_IN_MEMPCPY:
5946 target = expand_builtin_mempcpy (exp, target, mode);
5951 case BUILT_IN_MEMSET:
5952 target = expand_builtin_memset (exp, target, mode);
5957 case BUILT_IN_BZERO:
5958 target = expand_builtin_bzero (exp);
5963 case BUILT_IN_STRCMP:
5964 target = expand_builtin_strcmp (exp, target);
5969 case BUILT_IN_STRNCMP:
5970 target = expand_builtin_strncmp (exp, target, mode);
5976 case BUILT_IN_MEMCMP:
5977 target = expand_builtin_memcmp (exp, target, mode);
5982 case BUILT_IN_SETJMP:
5983 /* This should have been lowered to the builtins below. */
5986 case BUILT_IN_SETJMP_SETUP:
5987 /* __builtin_setjmp_setup is passed a pointer to an array of five words
5988 and the receiver label. */
5989 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5991 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5992 VOIDmode, EXPAND_NORMAL);
5993 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
5994 rtx label_r = label_rtx (label);
5996 /* This is copied from the handling of non-local gotos. */
5997 expand_builtin_setjmp_setup (buf_addr, label_r);
5998 nonlocal_goto_handler_labels
5999 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6000 nonlocal_goto_handler_labels);
6001 /* ??? Do not let expand_label treat us as such since we would
6002 not want to be both on the list of non-local labels and on
6003 the list of forced labels. */
6004 FORCED_LABEL (label) = 0;
6009 case BUILT_IN_SETJMP_DISPATCHER:
6010 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6011 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6013 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6014 rtx label_r = label_rtx (label);
6016 /* Remove the dispatcher label from the list of non-local labels
6017 since the receiver labels have been added to it above. */
6018 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6023 case BUILT_IN_SETJMP_RECEIVER:
6024 /* __builtin_setjmp_receiver is passed the receiver label. */
6025 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6027 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6028 rtx label_r = label_rtx (label);
6030 expand_builtin_setjmp_receiver (label_r);
6035 /* __builtin_longjmp is passed a pointer to an array of five words.
6036 It's similar to the C library longjmp function but works with
6037 __builtin_setjmp above. */
6038 case BUILT_IN_LONGJMP:
6039 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6041 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6042 VOIDmode, EXPAND_NORMAL);
6043 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6045 if (value != const1_rtx)
6047 error ("%<__builtin_longjmp%> second argument must be 1");
6051 expand_builtin_longjmp (buf_addr, value);
6056 case BUILT_IN_NONLOCAL_GOTO:
6057 target = expand_builtin_nonlocal_goto (exp);
6062 /* This updates the setjmp buffer that is its argument with the value
6063 of the current stack pointer. */
6064 case BUILT_IN_UPDATE_SETJMP_BUF:
6065 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6068 = expand_normal (CALL_EXPR_ARG (exp, 0));
6070 expand_builtin_update_setjmp_buf (buf_addr);
6076 expand_builtin_trap ();
6079 case BUILT_IN_UNREACHABLE:
6080 expand_builtin_unreachable ();
6083 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6084 case BUILT_IN_SIGNBITD32:
6085 case BUILT_IN_SIGNBITD64:
6086 case BUILT_IN_SIGNBITD128:
6087 target = expand_builtin_signbit (exp, target);
6092 /* Various hooks for the DWARF 2 __throw routine. */
6093 case BUILT_IN_UNWIND_INIT:
6094 expand_builtin_unwind_init ();
6096 case BUILT_IN_DWARF_CFA:
6097 return virtual_cfa_rtx;
6098 #ifdef DWARF2_UNWIND_INFO
6099 case BUILT_IN_DWARF_SP_COLUMN:
6100 return expand_builtin_dwarf_sp_column ();
6101 case BUILT_IN_INIT_DWARF_REG_SIZES:
6102 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6105 case BUILT_IN_FROB_RETURN_ADDR:
6106 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6107 case BUILT_IN_EXTRACT_RETURN_ADDR:
6108 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6109 case BUILT_IN_EH_RETURN:
6110 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6111 CALL_EXPR_ARG (exp, 1));
6113 #ifdef EH_RETURN_DATA_REGNO
6114 case BUILT_IN_EH_RETURN_DATA_REGNO:
6115 return expand_builtin_eh_return_data_regno (exp);
6117 case BUILT_IN_EXTEND_POINTER:
6118 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6119 case BUILT_IN_EH_POINTER:
6120 return expand_builtin_eh_pointer (exp);
6121 case BUILT_IN_EH_FILTER:
6122 return expand_builtin_eh_filter (exp);
6123 case BUILT_IN_EH_COPY_VALUES:
6124 return expand_builtin_eh_copy_values (exp);
6126 case BUILT_IN_VA_START:
6127 return expand_builtin_va_start (exp);
6128 case BUILT_IN_VA_END:
6129 return expand_builtin_va_end (exp);
6130 case BUILT_IN_VA_COPY:
6131 return expand_builtin_va_copy (exp);
6132 case BUILT_IN_EXPECT:
6133 return expand_builtin_expect (exp, target);
6134 case BUILT_IN_PREFETCH:
6135 expand_builtin_prefetch (exp);
6138 case BUILT_IN_PROFILE_FUNC_ENTER:
6139 return expand_builtin_profile_func (false);
6140 case BUILT_IN_PROFILE_FUNC_EXIT:
6141 return expand_builtin_profile_func (true);
6143 case BUILT_IN_INIT_TRAMPOLINE:
6144 return expand_builtin_init_trampoline (exp);
6145 case BUILT_IN_ADJUST_TRAMPOLINE:
6146 return expand_builtin_adjust_trampoline (exp);
6149 case BUILT_IN_EXECL:
6150 case BUILT_IN_EXECV:
6151 case BUILT_IN_EXECLP:
6152 case BUILT_IN_EXECLE:
6153 case BUILT_IN_EXECVP:
6154 case BUILT_IN_EXECVE:
6155 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6159 /* __sync_* builtins: the trailing _N suffix selects the operand
6160 width; get_builtin_sync_mode maps it to a machine mode. */
6160 case BUILT_IN_FETCH_AND_ADD_1:
6161 case BUILT_IN_FETCH_AND_ADD_2:
6162 case BUILT_IN_FETCH_AND_ADD_4:
6163 case BUILT_IN_FETCH_AND_ADD_8:
6164 case BUILT_IN_FETCH_AND_ADD_16:
6165 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6166 target = expand_builtin_sync_operation (mode, exp, PLUS,
6167 false, target, ignore);
6172 case BUILT_IN_FETCH_AND_SUB_1:
6173 case BUILT_IN_FETCH_AND_SUB_2:
6174 case BUILT_IN_FETCH_AND_SUB_4:
6175 case BUILT_IN_FETCH_AND_SUB_8:
6176 case BUILT_IN_FETCH_AND_SUB_16:
6177 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6178 target = expand_builtin_sync_operation (mode, exp, MINUS,
6179 false, target, ignore);
6184 case BUILT_IN_FETCH_AND_OR_1:
6185 case BUILT_IN_FETCH_AND_OR_2:
6186 case BUILT_IN_FETCH_AND_OR_4:
6187 case BUILT_IN_FETCH_AND_OR_8:
6188 case BUILT_IN_FETCH_AND_OR_16:
6189 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6190 target = expand_builtin_sync_operation (mode, exp, IOR,
6191 false, target, ignore);
6196 case BUILT_IN_FETCH_AND_AND_1:
6197 case BUILT_IN_FETCH_AND_AND_2:
6198 case BUILT_IN_FETCH_AND_AND_4:
6199 case BUILT_IN_FETCH_AND_AND_8:
6200 case BUILT_IN_FETCH_AND_AND_16:
6201 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6202 target = expand_builtin_sync_operation (mode, exp, AND,
6203 false, target, ignore);
6208 case BUILT_IN_FETCH_AND_XOR_1:
6209 case BUILT_IN_FETCH_AND_XOR_2:
6210 case BUILT_IN_FETCH_AND_XOR_4:
6211 case BUILT_IN_FETCH_AND_XOR_8:
6212 case BUILT_IN_FETCH_AND_XOR_16:
6213 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6214 target = expand_builtin_sync_operation (mode, exp, XOR,
6215 false, target, ignore);
6220 case BUILT_IN_FETCH_AND_NAND_1:
6221 case BUILT_IN_FETCH_AND_NAND_2:
6222 case BUILT_IN_FETCH_AND_NAND_4:
6223 case BUILT_IN_FETCH_AND_NAND_8:
6224 case BUILT_IN_FETCH_AND_NAND_16:
6225 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6226 target = expand_builtin_sync_operation (mode, exp, NOT,
6227 false, target, ignore);
6231 /* The _and_fetch variants pass AFTER == true to the expander. */
6232 case BUILT_IN_ADD_AND_FETCH_1:
6233 case BUILT_IN_ADD_AND_FETCH_2:
6234 case BUILT_IN_ADD_AND_FETCH_4:
6235 case BUILT_IN_ADD_AND_FETCH_8:
6236 case BUILT_IN_ADD_AND_FETCH_16:
6237 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6238 target = expand_builtin_sync_operation (mode, exp, PLUS,
6239 true, target, ignore);
6244 case BUILT_IN_SUB_AND_FETCH_1:
6245 case BUILT_IN_SUB_AND_FETCH_2:
6246 case BUILT_IN_SUB_AND_FETCH_4:
6247 case BUILT_IN_SUB_AND_FETCH_8:
6248 case BUILT_IN_SUB_AND_FETCH_16:
6249 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6250 target = expand_builtin_sync_operation (mode, exp, MINUS,
6251 true, target, ignore);
6256 case BUILT_IN_OR_AND_FETCH_1:
6257 case BUILT_IN_OR_AND_FETCH_2:
6258 case BUILT_IN_OR_AND_FETCH_4:
6259 case BUILT_IN_OR_AND_FETCH_8:
6260 case BUILT_IN_OR_AND_FETCH_16:
6261 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6262 target = expand_builtin_sync_operation (mode, exp, IOR,
6263 true, target, ignore);
6268 case BUILT_IN_AND_AND_FETCH_1:
6269 case BUILT_IN_AND_AND_FETCH_2:
6270 case BUILT_IN_AND_AND_FETCH_4:
6271 case BUILT_IN_AND_AND_FETCH_8:
6272 case BUILT_IN_AND_AND_FETCH_16:
6273 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6274 target = expand_builtin_sync_operation (mode, exp, AND,
6275 true, target, ignore);
6280 case BUILT_IN_XOR_AND_FETCH_1:
6281 case BUILT_IN_XOR_AND_FETCH_2:
6282 case BUILT_IN_XOR_AND_FETCH_4:
6283 case BUILT_IN_XOR_AND_FETCH_8:
6284 case BUILT_IN_XOR_AND_FETCH_16:
6285 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6286 target = expand_builtin_sync_operation (mode, exp, XOR,
6287 true, target, ignore);
6292 case BUILT_IN_NAND_AND_FETCH_1:
6293 case BUILT_IN_NAND_AND_FETCH_2:
6294 case BUILT_IN_NAND_AND_FETCH_4:
6295 case BUILT_IN_NAND_AND_FETCH_8:
6296 case BUILT_IN_NAND_AND_FETCH_16:
6297 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6298 target = expand_builtin_sync_operation (mode, exp, NOT,
6299 true, target, ignore);
6304 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6305 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6306 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6307 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6308 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6309 if (mode == VOIDmode)
6310 mode = TYPE_MODE (boolean_type_node);
6311 if (!target || !register_operand (target, mode))
6312 target = gen_reg_rtx (mode);
6313 /* The boolean form requires a register TARGET (see the expander's
6314 comment); MODE is then replaced by the sync operand mode. */
6314 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6315 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6320 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6321 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6322 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6323 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6324 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6325 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6326 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6331 case BUILT_IN_LOCK_TEST_AND_SET_1:
6332 case BUILT_IN_LOCK_TEST_AND_SET_2:
6333 case BUILT_IN_LOCK_TEST_AND_SET_4:
6334 case BUILT_IN_LOCK_TEST_AND_SET_8:
6335 case BUILT_IN_LOCK_TEST_AND_SET_16:
6336 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6337 target = expand_builtin_lock_test_and_set (mode, exp, target);
6342 case BUILT_IN_LOCK_RELEASE_1:
6343 case BUILT_IN_LOCK_RELEASE_2:
6344 case BUILT_IN_LOCK_RELEASE_4:
6345 case BUILT_IN_LOCK_RELEASE_8:
6346 case BUILT_IN_LOCK_RELEASE_16:
6347 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6348 expand_builtin_lock_release (mode, exp);
6351 case BUILT_IN_SYNCHRONIZE:
6352 expand_builtin_synchronize ();
6355 case BUILT_IN_OBJECT_SIZE:
6356 return expand_builtin_object_size (exp);
6358 case BUILT_IN_MEMCPY_CHK:
6359 case BUILT_IN_MEMPCPY_CHK:
6360 case BUILT_IN_MEMMOVE_CHK:
6361 case BUILT_IN_MEMSET_CHK:
6362 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6366 /* _chk variants that are not expanded inline: just warn if the
6367 object-size check can be proven to fail. */
6367 case BUILT_IN_STRCPY_CHK:
6368 case BUILT_IN_STPCPY_CHK:
6369 case BUILT_IN_STRNCPY_CHK:
6370 case BUILT_IN_STRCAT_CHK:
6371 case BUILT_IN_STRNCAT_CHK:
6372 case BUILT_IN_SNPRINTF_CHK:
6373 case BUILT_IN_VSNPRINTF_CHK:
6374 maybe_emit_chk_warning (exp, fcode);
6377 case BUILT_IN_SPRINTF_CHK:
6378 case BUILT_IN_VSPRINTF_CHK:
6379 maybe_emit_sprintf_chk_warning (exp, fcode);
6383 maybe_emit_free_warning (exp);
6386 default: /* just do library call, if unknown builtin */
6390 /* The switch statement above can drop through to cause the function
6391 to be called normally. */
6392 return expand_call (exp, target, ignore);
6395 /* Determine whether a tree node represents a call to a built-in
6396 function. If the tree T is a call to a built-in function with
6397 the right number of arguments of the appropriate types, return
6398 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6399 Otherwise the return value is END_BUILTINS. */
6401 enum built_in_function
6402 builtin_mathfn_code (const_tree t)
6404 const_tree fndecl, arg, parmlist;
6405 const_tree argtype, parmtype;
6406 const_call_expr_arg_iterator iter;
6407 /* Only direct calls (fn operand is an ADDR_EXPR) qualify. */
6408 if (TREE_CODE (t) != CALL_EXPR
6409 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6410 return END_BUILTINS;
6412 fndecl = get_callee_fndecl (t);
6413 if (fndecl == NULL_TREE
6414 || TREE_CODE (fndecl) != FUNCTION_DECL
6415 || ! DECL_BUILT_IN (fndecl)
6416 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6417 return END_BUILTINS;
6418 /* Walk the formal parameter list and the actual arguments in
6419 parallel, checking each argument's type class against the
6420 corresponding parameter's. */
6419 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6420 init_const_call_expr_arg_iterator (t, &iter);
6421 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6423 /* If a function doesn't take a variable number of arguments,
6424 the last element in the list will have type `void'. */
6425 parmtype = TREE_VALUE (parmlist);
6426 if (VOID_TYPE_P (parmtype))
6427 /* Extra actual arguments beyond the formals disqualify T. */
6428 if (more_const_call_expr_args_p (&iter))
6429 return END_BUILTINS;
6430 return DECL_FUNCTION_CODE (fndecl);
6432 /* Too few actual arguments also disqualify T. */
6433 if (! more_const_call_expr_args_p (&iter))
6434 return END_BUILTINS;
6436 arg = next_const_call_expr_arg (&iter);
6437 argtype = TREE_TYPE (arg);
6438 /* Argument and parameter must be in the same broad type class. */
6439 if (SCALAR_FLOAT_TYPE_P (parmtype))
6441 if (! SCALAR_FLOAT_TYPE_P (argtype))
6442 return END_BUILTINS;
6444 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6446 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6447 return END_BUILTINS;
6449 else if (POINTER_TYPE_P (parmtype))
6451 if (! POINTER_TYPE_P (argtype))
6452 return END_BUILTINS;
6454 else if (INTEGRAL_TYPE_P (parmtype))
6456 if (! INTEGRAL_TYPE_P (argtype))
6457 return END_BUILTINS;
6460 return END_BUILTINS;
6463 /* Variable-length argument list. */
6464 return DECL_FUNCTION_CODE (fndecl);
6467 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6468 evaluate to a constant.  Returns integer_one_node, integer_zero_node,
6469 or stays unfolded when the answer is not yet known. */
6471 fold_builtin_constant_p (tree arg)
6473 /* We return 1 for a numeric type that's known to be a constant
6474 value at compile-time or for an aggregate type that's a
6475 literal constant. */
6478 /* If we know this is a constant, emit the constant of one. */
6479 if (CONSTANT_CLASS_P (arg)
6480 || (TREE_CODE (arg) == CONSTRUCTOR
6481 && TREE_CONSTANT (arg)))
6482 return integer_one_node;
6483 if (TREE_CODE (arg) == ADDR_EXPR)
6485 tree op = TREE_OPERAND (arg, 0);
6486 /* The address of a string literal (or of its element 0) is a
6487 constant too. */
6486 if (TREE_CODE (op) == STRING_CST
6487 || (TREE_CODE (op) == ARRAY_REF
6488 && integer_zerop (TREE_OPERAND (op, 1))
6489 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6490 return integer_one_node;
6493 /* If this expression has side effects, show we don't know it to be a
6494 constant. Likewise if it's a pointer or aggregate type since in
6495 those case we only want literals, since those are only optimized
6496 when generating RTL, not later.
6497 And finally, if we are compiling an initializer, not code, we
6498 need to return a definite result now; there's not going to be any
6499 more optimization done. */
6500 if (TREE_SIDE_EFFECTS (arg)
6501 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6502 || POINTER_TYPE_P (TREE_TYPE (arg))
6504 || folding_initializer)
6505 return integer_zero_node;
6510 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6511 return it as a truthvalue.  LOC is the location to use for the
6512 built nodes. */
6514 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6516 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6517 /* Pull the parameter and return types from the __builtin_expect
6518 declaration so the operands can be converted to match. */
6518 fn = built_in_decls[BUILT_IN_EXPECT];
6519 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6520 ret_type = TREE_TYPE (TREE_TYPE (fn));
6521 pred_type = TREE_VALUE (arg_types);
6522 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6524 pred = fold_convert_loc (loc, pred_type, pred);
6525 expected = fold_convert_loc (loc, expected_type, expected);
6526 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6527 /* Compare against zero to turn the call back into a truthvalue. */
6528 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6529 build_int_cst (ret_type, 0));
6532 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6533 NULL_TREE if no simplification is possible. */
6536 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6539 enum tree_code code;
6541 /* If this is a builtin_expect within a builtin_expect keep the
6542 inner one. See through a comparison against a constant. It
6543 might have been added to create a thruthvalue. */
6545 if (COMPARISON_CLASS_P (inner)
6546 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6547 inner = TREE_OPERAND (inner, 0);
6549 if (TREE_CODE (inner) == CALL_EXPR
6550 && (fndecl = get_callee_fndecl (inner))
6551 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6552 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6555 /* Distribute the expected value over short-circuiting operators.
6556 See through the cast from truthvalue_type_node to long. */
6558 while (TREE_CODE (inner) == NOP_EXPR
6559 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6560 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6561 inner = TREE_OPERAND (inner, 0);
6563 code = TREE_CODE (inner);
6564 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6566 tree op0 = TREE_OPERAND (inner, 0);
6567 tree op1 = TREE_OPERAND (inner, 1);
6568 /* Wrap each operand in its own __builtin_expect so the hint
6569 survives the short-circuit lowering. */
6569 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6570 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6571 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6573 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6576 /* If the argument isn't invariant then there's nothing else we can do. */
6577 if (!TREE_CONSTANT (arg0))
6580 /* If we expect that a comparison against the argument will fold to
6581 a constant return the constant. In practice, this means a true
6582 constant or the address of a non-weak symbol. */
6585 if (TREE_CODE (inner) == ADDR_EXPR)
6588 /* Strip component/array references to find the underlying decl. */
6589 inner = TREE_OPERAND (inner, 0);
6591 while (TREE_CODE (inner) == COMPONENT_REF
6592 || TREE_CODE (inner) == ARRAY_REF);
6593 if ((TREE_CODE (inner) == VAR_DECL
6594 || TREE_CODE (inner) == FUNCTION_DECL)
6595 && DECL_WEAK (inner))
6599 /* Otherwise, ARG0 already has the proper type for the return value. */
6603 /* Fold a call to __builtin_classify_type with argument ARG.  With no
6604 argument, the result is no_type_class; otherwise the class of
6605 ARG's type, as an integer constant. */
6606 fold_builtin_classify_type (tree arg)
6609 return build_int_cst (NULL_TREE, no_type_class);
6611 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6614 /* Fold a call to __builtin_strlen with argument ARG.  Succeeds only
6615 when c_strlen can compute the length at compile time. */
6617 fold_builtin_strlen (location_t loc, tree arg)
6619 if (!validate_arg (arg, POINTER_TYPE))
6623 tree len = c_strlen (arg, 0);
6627 /* Convert from the internal "sizetype" type to "size_t". */
6629 len = fold_convert_loc (loc, size_type_node, len);
6637 /* Fold a call to __builtin_inf or __builtin_huge_val of type TYPE.
6638 WARN is nonzero when a pedwarn should be issued for targets whose
6639 format cannot represent infinity. */
6640 fold_builtin_inf (location_t loc, tree type, int warn)
6642 REAL_VALUE_TYPE real;
6644 /* __builtin_inff is intended to be usable to define INFINITY on all
6645 targets. If an infinity is not available, INFINITY expands "to a
6646 positive constant of type float that overflows at translation
6647 time", footnote "In this case, using INFINITY will violate the
6648 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6649 Thus we pedwarn to ensure this constraint violation is
6650 diagnosed. */
6651 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6652 pedwarn (loc, 0, "target format does not support infinity")
6655 return build_real (type, real);
6658 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG.
6659 QUIET is nonzero for the quiet-NaN variant.  Requires ARG to be a
6660 string constant that real_nan accepts for TYPE's mode. */
6661 fold_builtin_nan (tree arg, tree type, int quiet)
6663 REAL_VALUE_TYPE real;
6666 if (!validate_arg (arg, POINTER_TYPE))
6668 str = c_getstr (arg);
6671 /* real_nan fails when the payload string is malformed for this mode. */
6672 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6675 return build_real (type, real);
6678 /* Return true if the floating point expression T has an integer value.
6679 We also allow +Inf, -Inf and NaN to be considered integer values. */
6682 integer_valued_real_p (tree t)
6684 switch (TREE_CODE (t))
6690 /* Unary cases: integer-valued iff the operand is. */
6691 return integer_valued_real_p (TREE_OPERAND (t, 0));
6695 /* For e.g. COMPOUND_EXPR the value comes from operand 1. */
6696 return integer_valued_real_p (TREE_OPERAND (t, 1));
6702 /* Binary arithmetic: both operands must be integer valued. */
6703 return integer_valued_real_p (TREE_OPERAND (t, 0))
6704 && integer_valued_real_p (TREE_OPERAND (t, 1));
6706 /* Conditional: both arms must be integer valued. */
6707 return integer_valued_real_p (TREE_OPERAND (t, 1))
6708 && integer_valued_real_p (TREE_OPERAND (t, 2));
6710 /* Real constants can be tested directly. */
6711 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6715 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6716 if (TREE_CODE (type) == INTEGER_TYPE)
6718 if (TREE_CODE (type) == REAL_TYPE)
6719 return integer_valued_real_p (TREE_OPERAND (t, 0));
6723 /* Rounding math functions always produce integer values. */
6724 switch (builtin_mathfn_code (t))
6726 CASE_FLT_FN (BUILT_IN_CEIL):
6727 CASE_FLT_FN (BUILT_IN_FLOOR):
6728 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6729 CASE_FLT_FN (BUILT_IN_RINT):
6730 CASE_FLT_FN (BUILT_IN_ROUND):
6731 CASE_FLT_FN (BUILT_IN_TRUNC):
6733 /* fmin/fmax of two integer values is itself integer valued. */
6734 CASE_FLT_FN (BUILT_IN_FMIN):
6735 CASE_FLT_FN (BUILT_IN_FMAX):
6736 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6737 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6750 /* FNDECL is assumed to be a builtin where truncation can be propagated
6751 across (for instance floor((double)f) == (double)floorf (f).
6752 Do the transformation for a call with argument ARG. */
6755 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6757 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6759 if (!validate_arg (arg, REAL_TYPE))
6762 /* Integer rounding functions are idempotent. */
6763 if (fcode == builtin_mathfn_code (arg))
6766 /* If argument is already integer valued, and we don't need to worry
6767 about setting errno, there's no need to perform rounding. */
6768 if (! flag_errno_math && integer_valued_real_p (arg))
/* Narrowing: if the argument was widened from a smaller float type and
   a builtin of that narrower type exists, call the narrower builtin and
   widen its result instead.  */
6773 tree arg0 = strip_float_extensions (arg);
6774 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6775 tree newtype = TREE_TYPE (arg0);
6778 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6779 && (decl = mathfn_built_in (newtype, fcode)))
6780 return fold_convert_loc (loc, ftype,
6781 build_call_expr_loc (loc, decl, 1,
6782 fold_convert_loc (loc,
6789 /* FNDECL is assumed to be builtin which can narrow the FP type of
6790 the argument, for instance lround((double)f) -> lroundf (f).
6791 Do the transformation for a call with argument ARG. */
6794 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6796 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6798 if (!validate_arg (arg, REAL_TYPE))
6801 /* If argument is already integer valued, and we don't need to worry
6802 about setting errno, there's no need to perform rounding. */
6803 if (! flag_errno_math && integer_valued_real_p (arg))
6804 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6805 TREE_TYPE (TREE_TYPE (fndecl)), arg);
/* Narrow lround((double)f) -> lroundf (f) when a narrower builtin
   exists for the unextended argument type.  */
6809 tree ftype = TREE_TYPE (arg);
6810 tree arg0 = strip_float_extensions (arg);
6811 tree newtype = TREE_TYPE (arg0);
6814 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6815 && (decl = mathfn_built_in (newtype, fcode)))
6816 return build_call_expr_loc (loc, decl, 1,
6817 fold_convert_loc (loc, newtype, arg0));
6820 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6821 sizeof (long long) == sizeof (long). */
6822 if (TYPE_PRECISION (long_long_integer_type_node)
6823 == TYPE_PRECISION (long_integer_type_node))
6825 tree newfn = NULL_TREE;
/* NOTE(review): the switch header for the ll* -> l* mapping below is
   not visible in this extraction.  */
6828 CASE_FLT_FN (BUILT_IN_LLCEIL):
6829 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6832 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6833 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6836 CASE_FLT_FN (BUILT_IN_LLROUND):
6837 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6840 CASE_FLT_FN (BUILT_IN_LLRINT):
6841 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
/* Call the long variant and convert back to the long long result type.  */
6850 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6851 return fold_convert_loc (loc,
6852 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6859 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6860 return type. Return NULL_TREE if no simplification can be made. */
6863 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6867 if (!validate_arg (arg, COMPLEX_TYPE)
6868 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6871 /* Calculate the result when the argument is a constant. */
6872 if (TREE_CODE (arg) == COMPLEX_CST
6873 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6877 if (TREE_CODE (arg) == COMPLEX_EXPR)
6879 tree real = TREE_OPERAND (arg, 0);
6880 tree imag = TREE_OPERAND (arg, 1);
6882 /* If either part is zero, cabs is fabs of the other. */
6883 if (real_zerop (real))
6884 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6885 if (real_zerop (imag))
6886 return fold_build1_loc (loc, ABS_EXPR, type, real);
6888 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6889 if (flag_unsafe_math_optimizations
6890 && operand_equal_p (real, imag, OEP_PURE_SAME))
6892 const REAL_VALUE_TYPE sqrt2_trunc
6893 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6895 return fold_build2_loc (loc, MULT_EXPR, type,
6896 fold_build1_loc (loc, ABS_EXPR, type, real),
6897 build_real (type, sqrt2_trunc));
6901 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
6902 if (TREE_CODE (arg) == NEGATE_EXPR
6903 || TREE_CODE (arg) == CONJ_EXPR)
6904 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6906 /* Don't do this when optimizing for size. */
6907 if (flag_unsafe_math_optimizations
6908 && optimize && optimize_function_for_speed_p (cfun))
/* Expand cabs(z) as sqrt(re*re + im*im) when a sqrt builtin exists.
   builtin_save_expr guards against evaluating ARG (and its parts)
   more than once.  */
6910 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6912 if (sqrtfn != NULL_TREE)
6914 tree rpart, ipart, result;
6916 arg = builtin_save_expr (arg);
6918 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6919 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6921 rpart = builtin_save_expr (rpart);
6922 ipart = builtin_save_expr (ipart);
6924 result = fold_build2_loc (loc, PLUS_EXPR, type,
6925 fold_build2_loc (loc, MULT_EXPR, type,
6927 fold_build2_loc (loc, MULT_EXPR, type,
6930 return build_call_expr_loc (loc, sqrtfn, 1, result);
6937 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
6938 Return NULL_TREE if no simplification can be made. */
6941 fold_builtin_sqrt (location_t loc, tree arg, tree type)
6944 enum built_in_function fcode;
6947 if (!validate_arg (arg, REAL_TYPE))
6950 /* Calculate the result when the argument is a constant. */
6951 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
6954 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
6955 fcode = builtin_mathfn_code (arg);
6956 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6958 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6959 arg = fold_build2_loc (loc, MULT_EXPR, type,
6960 CALL_EXPR_ARG (arg, 0),
6961 build_real (type, dconsthalf));
6962 return build_call_expr_loc (loc, expfn, 1, arg);
6965 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
6966 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6968 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6972 tree arg0 = CALL_EXPR_ARG (arg, 0);
6974 /* The inner root was either sqrt or cbrt. */
6975 /* This was a conditional expression but it triggered a bug
6977 REAL_VALUE_TYPE dconstroot;
6978 if (BUILTIN_SQRT_P (fcode))
6979 dconstroot = dconsthalf;
6981 dconstroot = dconst_third ();
6983 /* Adjust for the outer root. */
/* Halving the exponent: decrementing the binary exponent divides the
   value by two (1/2 -> 1/4, 1/3 -> 1/6).  */
6984 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6985 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6986 tree_root = build_real (type, dconstroot);
6987 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6991 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
6992 if (flag_unsafe_math_optimizations
6993 && (fcode == BUILT_IN_POW
6994 || fcode == BUILT_IN_POWF
6995 || fcode == BUILT_IN_POWL))
6997 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6998 tree arg0 = CALL_EXPR_ARG (arg, 0);
6999 tree arg1 = CALL_EXPR_ARG (arg, 1);
/* |x| keeps the transformation valid for negative x with even y.  */
7001 if (!tree_expr_nonnegative_p (arg0))
7002 arg0 = build1 (ABS_EXPR, type, arg0);
7003 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7004 build_real (type, dconsthalf));
7005 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7011 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7012 Return NULL_TREE if no simplification can be made. */
7015 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7017 const enum built_in_function fcode = builtin_mathfn_code (arg);
7020 if (!validate_arg (arg, REAL_TYPE))
7023 /* Calculate the result when the argument is a constant. */
7024 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7027 if (flag_unsafe_math_optimizations)
7029 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7030 if (BUILTIN_EXPONENT_P (fcode))
7032 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7033 const REAL_VALUE_TYPE third_trunc =
7034 real_value_truncate (TYPE_MODE (type), dconst_third ());
7035 arg = fold_build2_loc (loc, MULT_EXPR, type,
7036 CALL_EXPR_ARG (arg, 0),
7037 build_real (type, third_trunc));
7038 return build_call_expr_loc (loc, expfn, 1, arg);
7041 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7042 if (BUILTIN_SQRT_P (fcode))
7044 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7048 tree arg0 = CALL_EXPR_ARG (arg, 0);
7050 REAL_VALUE_TYPE dconstroot = dconst_third ();
/* Decrement the binary exponent: 1/3 becomes 1/6.  */
7052 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7053 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7054 tree_root = build_real (type, dconstroot);
7055 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7059 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7060 if (BUILTIN_CBRT_P (fcode))
7062 tree arg0 = CALL_EXPR_ARG (arg, 0);
7063 if (tree_expr_nonnegative_p (arg0))
7065 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7070 REAL_VALUE_TYPE dconstroot;
/* (1/3) * (1/3) = 1/9, computed in extended precision then truncated
   to the target mode.  */
7072 real_arithmetic (&dconstroot, MULT_EXPR,
7073 dconst_third_ptr (), dconst_third_ptr ());
7074 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7075 tree_root = build_real (type, dconstroot);
7076 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7081 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7082 if (fcode == BUILT_IN_POW
7083 || fcode == BUILT_IN_POWF
7084 || fcode == BUILT_IN_POWL)
7086 tree arg00 = CALL_EXPR_ARG (arg, 0);
7087 tree arg01 = CALL_EXPR_ARG (arg, 1);
7088 if (tree_expr_nonnegative_p (arg00))
7090 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7091 const REAL_VALUE_TYPE dconstroot
7092 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7093 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7094 build_real (type, dconstroot));
7095 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7102 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7103 TYPE is the type of the return value. Return NULL_TREE if no
7104 simplification can be made. */
7107 fold_builtin_cos (location_t loc,
7108 tree arg, tree type, tree fndecl)
7112 if (!validate_arg (arg, REAL_TYPE))
7115 /* Calculate the result when the argument is a constant. */
7116 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7119 /* Optimize cos(-x) into cos (x). */
/* cos is even, so sign operations on the argument can be stripped.  */
7120 if ((narg = fold_strip_sign_ops (arg)))
7121 return build_call_expr_loc (loc, fndecl, 1, narg);
7126 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7127 Return NULL_TREE if no simplification can be made. */
7130 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7132 if (validate_arg (arg, REAL_TYPE))
7136 /* Calculate the result when the argument is a constant. */
7137 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7140 /* Optimize cosh(-x) into cosh (x). */
/* cosh is even, so sign operations on the argument can be stripped.  */
7141 if ((narg = fold_strip_sign_ops (arg)))
7142 return build_call_expr_loc (loc, fndecl, 1, narg);
7148 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7149 argument ARG. TYPE is the type of the return value. Return
7150 NULL_TREE if no simplification can be made. */
7153 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7156 if (validate_arg (arg, COMPLEX_TYPE)
7157 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7161 /* Calculate the result when the argument is a constant. */
/* Complex constant folding goes through MPC rather than MPFR.  */
7162 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7165 /* Optimize fn(-x) into fn(x). */
7166 if ((tmp = fold_strip_sign_ops (arg)))
7167 return build_call_expr_loc (loc, fndecl, 1, tmp);
7173 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7174 Return NULL_TREE if no simplification can be made. */
7177 fold_builtin_tan (tree arg, tree type)
7179 enum built_in_function fcode;
7182 if (!validate_arg (arg, REAL_TYPE))
7185 /* Calculate the result when the argument is a constant. */
7186 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7189 /* Optimize tan(atan(x)) = x. */
7190 fcode = builtin_mathfn_code (arg);
7191 if (flag_unsafe_math_optimizations
7192 && (fcode == BUILT_IN_ATAN
7193 || fcode == BUILT_IN_ATANF
7194 || fcode == BUILT_IN_ATANL))
7195 return CALL_EXPR_ARG (arg, 0);
7200 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7201 NULL_TREE if no simplification can be made.  ARG0 is the angle;
7202 ARG1 and ARG2 are the sin and cos destination pointers.  */
7204 fold_builtin_sincos (location_t loc,
7205 tree arg0, tree arg1, tree arg2)
7210 if (!validate_arg (arg0, REAL_TYPE)
7211 || !validate_arg (arg1, POINTER_TYPE)
7212 || !validate_arg (arg2, POINTER_TYPE))
7215 type = TREE_TYPE (arg0);
7217 /* Calculate the result when the argument is a constant. */
7218 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7221 /* Canonicalize sincos to cexpi. */
/* NOTE(review): the guard reads !TARGET_C99_FUNCTIONS here; whether a
   line making the transform conditional on another flag was dropped by
   this extraction should be confirmed against upstream.  */
7222 if (!TARGET_C99_FUNCTIONS)
7224 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7228 call = build_call_expr_loc (loc, fn, 1, arg0);
/* Save the cexpi result so the two stores below evaluate it once.  */
7229 call = builtin_save_expr (call);
7231 return build2 (COMPOUND_EXPR, void_type_node,
7232 build2 (MODIFY_EXPR, void_type_node,
7233 build_fold_indirect_ref_loc (loc, arg1),
7234 build1 (IMAGPART_EXPR, type, call)),
7235 build2 (MODIFY_EXPR, void_type_node,
7236 build_fold_indirect_ref_loc (loc, arg2),
7237 build1 (REALPART_EXPR, type, call)));
7240 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7241 NULL_TREE if no simplification can be made. */
7244 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7247 tree realp, imagp, ifn;
7250 if (!validate_arg (arg0, COMPLEX_TYPE)
7251 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7254 /* Calculate the result when the argument is a constant. */
7255 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
/* RTYPE is the scalar component type of the complex argument.  */
7258 rtype = TREE_TYPE (TREE_TYPE (arg0));
7260 /* In case we can figure out the real part of arg0 and it is constant zero
7262 if (!TARGET_C99_FUNCTIONS)
7264 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7268 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7269 && real_zerop (realp))
/* cexp(0 + yi) == cexpi(y).  */
7271 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7272 return build_call_expr_loc (loc, ifn, 1, narg);
7275 /* In case we can easily decompose real and imaginary parts split cexp
7276 to exp (r) * cexpi (i). */
7277 if (flag_unsafe_math_optimizations
7280 tree rfn, rcall, icall;
7282 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7286 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
/* Save both sub-calls so each is evaluated exactly once in the
   COMPLEX_EXPR below.  */
7290 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7291 icall = builtin_save_expr (icall);
7292 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7293 rcall = builtin_save_expr (rcall);
7294 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7295 fold_build2_loc (loc, MULT_EXPR, rtype,
7297 fold_build1_loc (loc, REALPART_EXPR,
7299 fold_build2_loc (loc, MULT_EXPR, rtype,
7301 fold_build1_loc (loc, IMAGPART_EXPR,
7308 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7309 Return NULL_TREE if no simplification can be made. */
7312 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7314 if (!validate_arg (arg, REAL_TYPE))
7317 /* Optimize trunc of constant value. */
7318 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7320 REAL_VALUE_TYPE r, x;
7321 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7323 x = TREE_REAL_CST (arg);
7324 real_trunc (&r, TYPE_MODE (type), &x);
7325 return build_real (type, r);
/* Otherwise try narrowing, e.g. trunc((double)f) -> (double)truncf (f).  */
7328 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7331 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7332 Return NULL_TREE if no simplification can be made. */
7335 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7337 if (!validate_arg (arg, REAL_TYPE))
7340 /* Optimize floor of constant value. */
7341 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7345 x = TREE_REAL_CST (arg);
/* Folding a NaN would bypass the runtime errno/NaN handling, so only
   do it when errno-math is off or the constant is not a NaN.  */
7346 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7348 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7351 real_floor (&r, TYPE_MODE (type), &x);
7352 return build_real (type, r);
7356 /* Fold floor (x) where x is nonnegative to trunc (x). */
7357 if (tree_expr_nonnegative_p (arg))
7359 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7361 return build_call_expr_loc (loc, truncfn, 1, arg);
7364 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7367 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7368 Return NULL_TREE if no simplification can be made. */
7371 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7373 if (!validate_arg (arg, REAL_TYPE))
7376 /* Optimize ceil of constant value. */
7377 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7381 x = TREE_REAL_CST (arg);
/* Don't fold a NaN when errno-math is in effect (same policy as floor).  */
7382 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7384 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7387 real_ceil (&r, TYPE_MODE (type), &x);
7388 return build_real (type, r);
7392 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7395 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7396 Return NULL_TREE if no simplification can be made. */
7399 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7401 if (!validate_arg (arg, REAL_TYPE))
7404 /* Optimize round of constant value. */
7405 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7409 x = TREE_REAL_CST (arg)
7410 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7412 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7415 real_round (&r, TYPE_MODE (type), &x);
7416 return build_real (type, r);
7420 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7423 /* Fold function call to builtin lround, lroundf or lroundl (or the
7424 corresponding long long versions) and other rounding functions. ARG
7425 is the argument to the call. Return NULL_TREE if no simplification
7429 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7431 if (!validate_arg (arg, REAL_TYPE))
7434 /* Optimize lround of constant value. */
7435 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7437 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
/* Only finite values fold; Inf/NaN must keep their runtime behavior.  */
7439 if (real_isfinite (&x))
7441 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7442 tree ftype = TREE_TYPE (arg);
7443 unsigned HOST_WIDE_INT lo2;
7444 HOST_WIDE_INT hi, lo;
7447 switch (DECL_FUNCTION_CODE (fndecl))
7449 CASE_FLT_FN (BUILT_IN_LFLOOR):
7450 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7451 real_floor (&r, TYPE_MODE (ftype), &x);
7454 CASE_FLT_FN (BUILT_IN_LCEIL):
7455 CASE_FLT_FN (BUILT_IN_LLCEIL):
7456 real_ceil (&r, TYPE_MODE (ftype), &x);
7459 CASE_FLT_FN (BUILT_IN_LROUND):
7460 CASE_FLT_FN (BUILT_IN_LLROUND):
7461 real_round (&r, TYPE_MODE (ftype), &x);
/* Convert the rounded real to a double-word integer; only fold when it
   fits the integer result type without overflow.  NOTE(review): the
   sense of the fit_double_type guard cannot be confirmed from this
   extraction -- verify against upstream.  */
7468 REAL_VALUE_TO_INT (&lo, &hi, r);
7469 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7470 return build_int_cst_wide (itype, lo2, hi);
7474 switch (DECL_FUNCTION_CODE (fndecl))
7476 CASE_FLT_FN (BUILT_IN_LFLOOR):
7477 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7478 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7479 if (tree_expr_nonnegative_p (arg))
7480 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7481 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7486 return fold_fixed_mathfn (loc, fndecl, arg);
7489 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7490 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7491 the argument to the call. Return NULL_TREE if no simplification can
7495 fold_builtin_bitop (tree fndecl, tree arg)
7497 if (!validate_arg (arg, INTEGER_TYPE))
7500 /* Optimize for constant argument. */
7501 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7503 HOST_WIDE_INT hi, width, result;
7504 unsigned HOST_WIDE_INT lo;
7507 type = TREE_TYPE (arg);
7508 width = TYPE_PRECISION (type);
/* The constant is held as a LO/HI pair of HOST_WIDE_INTs.  */
7509 lo = TREE_INT_CST_LOW (arg);
7511 /* Clear all the bits that are beyond the type's precision. */
7512 if (width > HOST_BITS_PER_WIDE_INT)
7514 hi = TREE_INT_CST_HIGH (arg);
7515 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7516 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7521 if (width < HOST_BITS_PER_WIDE_INT)
7522 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7525 switch (DECL_FUNCTION_CODE (fndecl))
7527 CASE_INT_FN (BUILT_IN_FFS):
/* ffs: lo & -lo isolates the lowest set bit; +1 gives 1-based index.  */
7529 result = exact_log2 (lo & -lo) + 1;
7531 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7536 CASE_INT_FN (BUILT_IN_CLZ):
7538 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7540 result = width - floor_log2 (lo) - 1;
/* clz(0) is only foldable if the target defines a value for it.  */
7541 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7545 CASE_INT_FN (BUILT_IN_CTZ):
7547 result = exact_log2 (lo & -lo);
7549 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7550 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7554 CASE_INT_FN (BUILT_IN_POPCOUNT):
/* Kernighan's trick: each lo &= lo - 1 clears one set bit.  */
7557 result++, lo &= lo - 1;
7559 result++, hi &= hi - 1;
7562 CASE_INT_FN (BUILT_IN_PARITY):
7565 result++, lo &= lo - 1;
7567 result++, hi &= hi - 1;
7575 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7581 /* Fold function call to builtin_bswap and the long and long long
7582 variants. Return NULL_TREE if no simplification can be made. */
7584 fold_builtin_bswap (tree fndecl, tree arg)
7586 if (! validate_arg (arg, INTEGER_TYPE))
7589 /* Optimize constant value. */
7590 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7592 HOST_WIDE_INT hi, width, r_hi = 0;
7593 unsigned HOST_WIDE_INT lo, r_lo = 0;
7596 type = TREE_TYPE (arg);
7597 width = TYPE_PRECISION (type);
7598 lo = TREE_INT_CST_LOW (arg);
7599 hi = TREE_INT_CST_HIGH (arg);
7601 switch (DECL_FUNCTION_CODE (fndecl))
7603 case BUILT_IN_BSWAP32:
7604 case BUILT_IN_BSWAP64:
/* Mirror the bytes: byte at bit offset S moves to offset D = width-S-8,
   crossing the lo/hi word boundary as needed.  */
7608 for (s = 0; s < width; s += 8)
7610 int d = width - s - 8;
7611 unsigned HOST_WIDE_INT byte;
7613 if (s < HOST_BITS_PER_WIDE_INT)
7614 byte = (lo >> s) & 0xff;
7616 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7618 if (d < HOST_BITS_PER_WIDE_INT)
7621 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
/* A narrow result fits entirely in the low word.  */
7631 if (width < HOST_BITS_PER_WIDE_INT)
7632 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7634 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7640 /* A subroutine of fold_builtin to fold the various logarithmic
7641 functions. Return NULL_TREE if no simplification can me made.
7642 FUNC is the corresponding MPFR logarithm function. */
7645 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7646 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7648 if (validate_arg (arg, REAL_TYPE))
7650 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7652 const enum built_in_function fcode = builtin_mathfn_code (arg);
7654 /* Calculate the result when the argument is a constant. */
7655 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7658 /* Special case, optimize logN(expN(x)) = x. */
/* FUNC doubles as the identity of which log this is (log/log2/log10),
   matched here against the corresponding exp builtin.  */
7659 if (flag_unsafe_math_optimizations
7660 && ((func == mpfr_log
7661 && (fcode == BUILT_IN_EXP
7662 || fcode == BUILT_IN_EXPF
7663 || fcode == BUILT_IN_EXPL))
7664 || (func == mpfr_log2
7665 && (fcode == BUILT_IN_EXP2
7666 || fcode == BUILT_IN_EXP2F
7667 || fcode == BUILT_IN_EXP2L))
7668 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7669 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7671 /* Optimize logN(func()) for various exponential functions. We
7672 want to determine the value "x" and the power "exponent" in
7673 order to transform logN(x**exponent) into exponent*logN(x). */
7674 if (flag_unsafe_math_optimizations)
7676 tree exponent = 0, x = 0;
/* NOTE(review): the switch header for the arms below is missing from
   this extraction.  */
7680 CASE_FLT_FN (BUILT_IN_EXP):
7681 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7682 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7684 exponent = CALL_EXPR_ARG (arg, 0);
7686 CASE_FLT_FN (BUILT_IN_EXP2):
7687 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7688 x = build_real (type, dconst2);
7689 exponent = CALL_EXPR_ARG (arg, 0);
7691 CASE_FLT_FN (BUILT_IN_EXP10):
7692 CASE_FLT_FN (BUILT_IN_POW10):
7693 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7695 REAL_VALUE_TYPE dconst10;
7696 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7697 x = build_real (type, dconst10);
7699 exponent = CALL_EXPR_ARG (arg, 0);
7701 CASE_FLT_FN (BUILT_IN_SQRT):
7702 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7703 x = CALL_EXPR_ARG (arg, 0);
7704 exponent = build_real (type, dconsthalf);
7706 CASE_FLT_FN (BUILT_IN_CBRT):
7707 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7708 x = CALL_EXPR_ARG (arg, 0);
7709 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7712 CASE_FLT_FN (BUILT_IN_POW):
7713 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7714 x = CALL_EXPR_ARG (arg, 0);
7715 exponent = CALL_EXPR_ARG (arg, 1);
7721 /* Now perform the optimization. */
7724 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7725 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7733 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7734 NULL_TREE if no simplification can be made. */
7737 fold_builtin_hypot (location_t loc, tree fndecl,
7738 tree arg0, tree arg1, tree type)
7740 tree res, narg0, narg1;
7742 if (!validate_arg (arg0, REAL_TYPE)
7743 || !validate_arg (arg1, REAL_TYPE))
7746 /* Calculate the result when the argument is a constant. */
7747 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7750 /* If either argument to hypot has a negate or abs, strip that off.
7751 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
/* Valid because hypot is even in each argument separately.  */
7752 narg0 = fold_strip_sign_ops (arg0);
7753 narg1 = fold_strip_sign_ops (arg1);
7756 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7757 narg1 ? narg1 : arg1);
7760 /* If either argument is zero, hypot is fabs of the other. */
7761 if (real_zerop (arg0))
7762 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7763 else if (real_zerop (arg1))
7764 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7766 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7767 if (flag_unsafe_math_optimizations
7768 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7770 const REAL_VALUE_TYPE sqrt2_trunc
7771 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7772 return fold_build2_loc (loc, MULT_EXPR, type,
7773 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7774 build_real (type, sqrt2_trunc));
7781 /* Fold a builtin function call to pow, powf, or powl. Return
7782 NULL_TREE if no simplification can be made. */
7784 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7788 if (!validate_arg (arg0, REAL_TYPE)
7789 || !validate_arg (arg1, REAL_TYPE))
7792 /* Calculate the result when the argument is a constant. */
7793 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7796 /* Optimize pow(1.0,y) = 1.0. */
7797 if (real_onep (arg0))
7798 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7800 if (TREE_CODE (arg1) == REAL_CST
7801 && !TREE_OVERFLOW (arg1))
7803 REAL_VALUE_TYPE cint;
7807 c = TREE_REAL_CST (arg1);
7809 /* Optimize pow(x,0.0) = 1.0. */
7810 if (REAL_VALUES_EQUAL (c, dconst0))
7811 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7814 /* Optimize pow(x,1.0) = x. */
7815 if (REAL_VALUES_EQUAL (c, dconst1))
7818 /* Optimize pow(x,-1.0) = 1.0/x. */
7819 if (REAL_VALUES_EQUAL (c, dconstm1))
7820 return fold_build2_loc (loc, RDIV_EXPR, type,
7821 build_real (type, dconst1), arg0);
7823 /* Optimize pow(x,0.5) = sqrt(x). */
7824 if (flag_unsafe_math_optimizations
7825 && REAL_VALUES_EQUAL (c, dconsthalf))
7827 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7829 if (sqrtfn != NULL_TREE)
7830 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7833 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7834 if (flag_unsafe_math_optimizations)
7836 const REAL_VALUE_TYPE dconstroot
7837 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7839 if (REAL_VALUES_EQUAL (c, dconstroot))
7841 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7842 if (cbrtfn != NULL_TREE)
7843 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7847 /* Check for an integer exponent. */
/* Round-trip through an integer and compare: C is integral iff the
   round-trip reproduces it exactly.  */
7848 n = real_to_integer (&c);
7849 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7850 if (real_identical (&c, &cint))
7852 /* Attempt to evaluate pow at compile-time, unless this should
7853 raise an exception. */
7854 if (TREE_CODE (arg0) == REAL_CST
7855 && !TREE_OVERFLOW (arg0)
7857 || (!flag_trapping_math && !flag_errno_math)
7858 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7863 x = TREE_REAL_CST (arg0);
/* real_powi reports whether the result was inexact; an inexact fold
   is only acceptable under -funsafe-math-optimizations.  */
7864 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7865 if (flag_unsafe_math_optimizations || !inexact)
7866 return build_real (type, x);
7869 /* Strip sign ops from even integer powers. */
7870 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7872 tree narg0 = fold_strip_sign_ops (arg0);
7874 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
7879 if (flag_unsafe_math_optimizations)
7881 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7883 /* Optimize pow(expN(x),y) = expN(x*y). */
7884 if (BUILTIN_EXPONENT_P (fcode))
7886 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7887 tree arg = CALL_EXPR_ARG (arg0, 0);
7888 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7889 return build_call_expr_loc (loc, expfn, 1, arg);
7892 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7893 if (BUILTIN_SQRT_P (fcode))
7895 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7896 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7897 build_real (type, dconsthalf));
7898 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7901 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7902 if (BUILTIN_CBRT_P (fcode))
7904 tree arg = CALL_EXPR_ARG (arg0, 0);
7905 if (tree_expr_nonnegative_p (arg))
7907 const REAL_VALUE_TYPE dconstroot
7908 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7909 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7910 build_real (type, dconstroot));
7911 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7915 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7916 if (fcode == BUILT_IN_POW
7917 || fcode == BUILT_IN_POWF
7918 || fcode == BUILT_IN_POWL)
7920 tree arg00 = CALL_EXPR_ARG (arg0, 0);
7921 if (tree_expr_nonnegative_p (arg00))
7923 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7924 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7925 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7933 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
7934 Return NULL_TREE if no simplification can be made. */
7936 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7937 tree arg0, tree arg1, tree type)
7939 if (!validate_arg (arg0, REAL_TYPE)
7940 || !validate_arg (arg1, INTEGER_TYPE))
7943 /* Optimize pow(1.0,y) = 1.0. */
7944 if (real_onep (arg0))
7945 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
/* host_integerp: the integer exponent fits in a HOST_WIDE_INT.  */
7947 if (host_integerp (arg1, 0))
7949 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7951 /* Evaluate powi at compile-time. */
7952 if (TREE_CODE (arg0) == REAL_CST
7953 && !TREE_OVERFLOW (arg0))
7956 x = TREE_REAL_CST (arg0);
7957 real_powi (&x, TYPE_MODE (type), &x, c);
7958 return build_real (type, x);
7961 /* Optimize pow(x,0) = 1.0. */
/* The guards for c == 0 / c == 1 / c == -1 are missing from this
   extraction; the existing comments mark each arm.  */
7963 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7966 /* Optimize pow(x,1) = x. */
7970 /* Optimize pow(x,-1) = 1.0/x. */
7972 return fold_build2_loc (loc, RDIV_EXPR, type,
7973 build_real (type, dconst1), arg0);
7979 /* A subroutine of fold_builtin to fold the various exponent
7980 functions. Return NULL_TREE if no simplification can be made.
7981 FUNC is the corresponding MPFR exponent function. */
7984 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
7985 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7987 if (validate_arg (arg, REAL_TYPE))
/* The result type of the builtin being folded.  */
7989 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7992 /* Calculate the result when the argument is a constant. */
7993 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
7996 /* Optimize expN(logN(x)) = x. */
7997 if (flag_unsafe_math_optimizations)
7999 const enum built_in_function fcode = builtin_mathfn_code (arg);
/* Match the inner log builtin against the exp family we were handed,
   keyed on the MPFR function pointer: exp<->log, exp2<->log2,
   exp10<->log10 (each with its float/double/long double variants).  */
8001 if ((func == mpfr_exp
8002 && (fcode == BUILT_IN_LOG
8003 || fcode == BUILT_IN_LOGF
8004 || fcode == BUILT_IN_LOGL))
8005 || (func == mpfr_exp2
8006 && (fcode == BUILT_IN_LOG2
8007 || fcode == BUILT_IN_LOG2F
8008 || fcode == BUILT_IN_LOG2L))
8009 || (func == mpfr_exp10
8010 && (fcode == BUILT_IN_LOG10
8011 || fcode == BUILT_IN_LOG10F
8012 || fcode == BUILT_IN_LOG10L)))
8013 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8020 /* Return true if VAR is a VAR_DECL or a component thereof. */
8023 var_decl_component_p (tree var)
/* Peel off handled component references (COMPONENT_REF, ARRAY_REF,
   etc.) until we reach the underlying base object.  */
8026 while (handled_component_p (inner))
8027 inner = TREE_OPERAND (inner, 0);
8028 return SSA_VAR_P (inner);
8031 /* Fold function call to builtin memset. Return
8032 NULL_TREE if no simplification can be made. */
8035 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8036 tree type, bool ignore)
8038 tree var, ret, etype;
8039 unsigned HOST_WIDE_INT length, cval;
8041 if (! validate_arg (dest, POINTER_TYPE)
8042 || ! validate_arg (c, INTEGER_TYPE)
8043 || ! validate_arg (len, INTEGER_TYPE))
/* Only a non-negative compile-time constant length is handled.  */
8046 if (! host_integerp (len, 1))
8049 /* If the LEN parameter is zero, return DEST. */
8050 if (integer_zerop (len))
8051 return omit_one_operand_loc (loc, type, dest, c)
8053 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
/* The rest of the function rewrites memset as a single scalar store,
   so DEST must be the address of a non-volatile scalar object.  */
8058 if (TREE_CODE (var) != ADDR_EXPR)
8061 var = TREE_OPERAND (var, 0)
8062 if (TREE_THIS_VOLATILE (var))
8065 etype = TREE_TYPE (var);
8066 if (TREE_CODE (etype) == ARRAY_TYPE)
8067 etype = TREE_TYPE (etype);
8069 if (!INTEGRAL_TYPE_P (etype)
8070 && !POINTER_TYPE_P (etype))
8073 if (! var_decl_component_p (var))
/* The store must cover the accessed object exactly and the pointer
   must be sufficiently aligned for a single access of that size.  */
8076 length = tree_low_cst (len, 1);
8077 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8078 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8082 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8085 if (integer_zerop (c))
8089 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8092 cval = tree_low_cst (c, 1);
/* Replicate the filler into the upper half; the two-step shift avoids
   an out-of-range shift count of 32 when HOST_WIDE_INT is 32 bits.
   NOTE(review): the lines that replicate CVAL into the low bytes
   appear to be elided from this listing — verify against the full
   source.  */
8096 cval |= (cval << 31) << 1;
/* Emit *(etype *)dest = cval as the replacement.  */
8099 ret = build_int_cst_type (etype, cval);
8100 var = build_fold_indirect_ref_loc (loc,
8101 fold_convert_loc (loc,
8102 build_pointer_type (etype),
8104 ret = build2 (MODIFY_EXPR, etype, var, ret);
8108 return omit_one_operand_loc (loc, type, dest, ret);
8111 /* Fold function call to builtin bzero. Return
8112 NULL_TREE if no simplification can be made. */
8115 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8117 if (! validate_arg (dest, POINTER_TYPE)
8118 || ! validate_arg (size, INTEGER_TYPE))
8124 /* New argument list transforming bzero(ptr x, int y) to
8125 memset(ptr x, int 0, size_t y). This is done this way
8126 so that if it isn't expanded inline, we fallback to
8127 calling bzero instead of memset. */
/* Delegate to the memset folder with a zero filler; the SIZE argument
   is converted to size_t to match memset's signature.  */
8129 return fold_builtin_memset (loc, dest, integer_zero_node,
8130 fold_convert_loc (loc, sizetype, size),
8131 void_type_node, ignore);
8134 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8135 NULL_TREE if no simplification can be made.
8136 If ENDP is 0, return DEST (like memcpy).
8137 If ENDP is 1, return DEST+LEN (like mempcpy).
8138 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8139 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8143 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8144 tree len, tree type, bool ignore, int endp)
8146 tree destvar, srcvar, expr;
8148 if (! validate_arg (dest, POINTER_TYPE)
8149 || ! validate_arg (src, POINTER_TYPE)
8150 || ! validate_arg (len, INTEGER_TYPE))
8153 /* If the LEN parameter is zero, return DEST. */
8154 if (integer_zerop (len))
8155 return omit_one_operand_loc (loc, type, dest, src);
8157 /* If SRC and DEST are the same (and not volatile), return
8158 DEST{,+LEN,+LEN-1}. */
8159 if (operand_equal_p (src, dest, 0))
/* This arm (presumably guarded by an elided ENDP test — confirm
   against the full source) tries to turn memmove into memcpy.  */
8163 tree srctype, desttype;
8164 int src_align, dest_align;
8168 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8169 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8171 /* Both DEST and SRC must be pointer types.
8172 ??? This is what old code did. Is the testing for pointer types
8175 If either SRC is readonly or length is 1, we can use memcpy. */
8176 if (!dest_align || !src_align)
8178 if (readonly_data_expr (src)
8179 || (host_integerp (len, 1)
8180 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8181 >= tree_low_cst (len, 1))))
8183 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8186 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8189 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8190 srcvar = build_fold_indirect_ref_loc (loc, src);
8191 destvar = build_fold_indirect_ref_loc (loc, dest);
8193 && !TREE_THIS_VOLATILE (srcvar)
8195 && !TREE_THIS_VOLATILE (destvar))
8197 tree src_base, dest_base, fn;
8198 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8199 HOST_WIDE_INT size = -1;
8200 HOST_WIDE_INT maxsize = -1;
/* Compute the base objects and bit offsets of both accesses so the
   ranges can be compared for overlap.  */
8203 if (handled_component_p (src_base))
8204 src_base = get_ref_base_and_extent (src_base, &src_offset,
8206 dest_base = destvar;
8207 if (handled_component_p (dest_base))
8208 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8210 if (host_integerp (len, 1))
8212 maxsize = tree_low_cst (len, 1);
/* Guard the BITS_PER_UNIT scaling below against overflow.  */
8214 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8217 maxsize *= BITS_PER_UNIT;
/* Overlap is decided by the kind of base: two decls overlap only if
   they are the same decl with overlapping ranges; two indirect refs
   are assumed to overlap unless they dereference the same pointer
   with non-overlapping ranges.  */
8221 if (SSA_VAR_P (src_base)
8222 && SSA_VAR_P (dest_base))
8224 if (operand_equal_p (src_base, dest_base, 0)
8225 && ranges_overlap_p (src_offset, maxsize,
8226 dest_offset, maxsize))
8229 else if (TREE_CODE (src_base) == INDIRECT_REF
8230 && TREE_CODE (dest_base) == INDIRECT_REF)
8232 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8233 TREE_OPERAND (dest_base, 0), 0)
8234 || ranges_overlap_p (src_offset, maxsize,
8235 dest_offset, maxsize))
8241 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8244 return build_call_expr_loc (loc, fn, 3, dest, src, len);
/* From here on, attempt to rewrite the copy as one scalar/aggregate
   assignment *destvar = *srcvar; this needs a constant length.  */
8249 if (!host_integerp (len, 0))
8252 This logic lose for arguments like (type *)malloc (sizeof (type)),
8253 since we strip the casts of up to VOID return value from malloc.
8254 Perhaps we ought to inherit type from non-VOID argument here? */
8257 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8258 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8260 tree tem = TREE_OPERAND (src, 0);
8262 if (tem != TREE_OPERAND (src, 0))
8263 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8265 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8267 tree tem = TREE_OPERAND (dest, 0);
8269 if (tem != TREE_OPERAND (dest, 0))
8270 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
/* If the pointed-to type is an array whose size differs from LEN,
   use the element type instead.  */
8272 srctype = TREE_TYPE (TREE_TYPE (src));
8274 && TREE_CODE (srctype) == ARRAY_TYPE
8275 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8277 srctype = TREE_TYPE (srctype);
8279 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8281 desttype = TREE_TYPE (TREE_TYPE (dest));
8283 && TREE_CODE (desttype) == ARRAY_TYPE
8284 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8286 desttype = TREE_TYPE (desttype);
8288 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
/* Both types must have a constant size and be non-volatile for the
   single-assignment rewrite to be valid.  */
8290 if (!srctype || !desttype
8291 || !TYPE_SIZE_UNIT (srctype)
8292 || !TYPE_SIZE_UNIT (desttype)
8293 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8294 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8295 || TYPE_VOLATILE (srctype)
8296 || TYPE_VOLATILE (desttype))
8299 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8300 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8301 if (dest_align < (int) TYPE_ALIGN (desttype)
8302 || src_align < (int) TYPE_ALIGN (srctype))
/* DEST is used again below to build the return value; protect it
   from double evaluation.  */
8306 dest = builtin_save_expr (dest);
8309 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8311 srcvar = build_fold_indirect_ref_loc (loc, src);
8312 if (TREE_THIS_VOLATILE (srcvar))
8314 else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
8316 /* With memcpy, it is possible to bypass aliasing rules, so without
8317 this check i.e. execute/20060930-2.c would be misoptimized,
8318 because it use conflicting alias set to hold argument for the
8319 memcpy call. This check is probably unnecessary with
8320 -fno-strict-aliasing. Similarly for destvar. See also
8322 else if (!var_decl_component_p (srcvar))
8326 destvar = NULL_TREE;
8327 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8329 destvar = build_fold_indirect_ref_loc (loc, dest);
8330 if (TREE_THIS_VOLATILE (destvar))
8332 else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
8333 destvar = NULL_TREE;
8334 else if (!var_decl_component_p (destvar))
8335 destvar = NULL_TREE;
/* Give up if neither side yielded a usable variable; if exactly one
   side did, synthesize the other side's access from its type.  */
8338 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8341 if (srcvar == NULL_TREE)
8344 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8347 srctype = build_qualified_type (desttype, 0);
8348 if (src_align < (int) TYPE_ALIGN (srctype))
8350 if (AGGREGATE_TYPE_P (srctype)
8351 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
/* Build an under-aligned variant of the type so the access honors
   the actual pointer alignment.  */
8354 srctype = build_variant_type_copy (srctype);
8355 TYPE_ALIGN (srctype) = src_align;
8356 TYPE_USER_ALIGN (srctype) = 1;
8357 TYPE_PACKED (srctype) = 1;
8359 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8360 src = fold_convert_loc (loc, srcptype, src);
8361 srcvar = build_fold_indirect_ref_loc (loc, src);
8363 else if (destvar == NULL_TREE)
8366 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8369 desttype = build_qualified_type (srctype, 0);
8370 if (dest_align < (int) TYPE_ALIGN (desttype))
8372 if (AGGREGATE_TYPE_P (desttype)
8373 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8376 desttype = build_variant_type_copy (desttype);
8377 TYPE_ALIGN (desttype) = dest_align;
8378 TYPE_USER_ALIGN (desttype) = 1;
8379 TYPE_PACKED (desttype) = 1;
8381 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8382 dest = fold_convert_loc (loc, destptype, dest);
8383 destvar = build_fold_indirect_ref_loc (loc, dest);
/* Use a plain copy when the types agree; a scalar conversion when
   both sides are integral/pointer; otherwise reinterpret the bits
   with VIEW_CONVERT_EXPR.  */
8386 if (srctype == desttype
8387 || (gimple_in_ssa_p (cfun)
8388 && useless_type_conversion_p (desttype, srctype)))
8390 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8391 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8392 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8393 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8394 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
8396 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8397 TREE_TYPE (destvar), srcvar);
8398 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
/* memcpy/memmove (ENDP 0/3) return DEST; mempcpy/stpcpy (ENDP 1/2)
   return DEST+LEN or DEST+LEN-1 respectively.  */
8404 if (endp == 0 || endp == 3)
8405 return omit_one_operand_loc (loc, type, dest, expr);
8411 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8414 len = fold_convert_loc (loc, sizetype, len);
8415 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8416 dest = fold_convert_loc (loc, type, dest);
8418 dest = omit_one_operand_loc (loc, type, dest, expr);
8422 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8423 If LEN is not NULL, it represents the length of the string to be
8424 copied. Return NULL_TREE if no simplification can be made. */
8427 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8431 if (!validate_arg (dest, POINTER_TYPE)
8432 || !validate_arg (src, POINTER_TYPE))
8435 /* If SRC and DEST are the same (and not volatile), return DEST. */
8436 if (operand_equal_p (src, dest, 0))
8437 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
/* When optimizing for size, do not expand strcpy into memcpy.  */
8439 if (optimize_function_for_size_p (cfun))
8442 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
/* Compute the string length if the caller did not supply one; give
   up unless it is a side-effect-free expression.  */
8448 len = c_strlen (src, 1);
8449 if (! len || TREE_SIDE_EFFECTS (len))
/* Copy LEN + 1 bytes to include the terminating NUL.  */
8453 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8454 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8455 build_call_expr_loc (loc, fn, 3, dest, src, len));
8458 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8459 Return NULL_TREE if no simplification can be made. */
8462 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8464 tree fn, len, lenp1, call, type;
8466 if (!validate_arg (dest, POINTER_TYPE)
8467 || !validate_arg (src, POINTER_TYPE))
/* The source length must be a compile-time constant so the return
   value DEST + LEN can be formed below.  */
8470 len = c_strlen (src, 1);
8472 || TREE_CODE (len) != INTEGER_CST)
8475 if (optimize_function_for_size_p (cfun)
8476 /* If length is zero it's small enough. */
8477 && !integer_zerop (len))
8480 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
/* memcpy copies LEN + 1 bytes, including the terminating NUL.  */
8484 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8485 /* We use dest twice in building our expression. Save it from
8486 multiple expansions. */
8487 dest = builtin_save_expr (dest);
8488 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
/* stpcpy returns a pointer to the terminating NUL, i.e. DEST + LEN.  */
8490 type = TREE_TYPE (TREE_TYPE (fndecl));
8491 len = fold_convert_loc (loc, sizetype, len);
8492 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8493 dest = fold_convert_loc (loc, type, dest);
8494 dest = omit_one_operand_loc (loc, type, dest, call);
8498 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8499 If SLEN is not NULL, it represents the length of the source string.
8500 Return NULL_TREE if no simplification can be made. */
8503 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8504 tree src, tree len, tree slen)
8508 if (!validate_arg (dest, POINTER_TYPE)
8509 || !validate_arg (src, POINTER_TYPE)
8510 || !validate_arg (len, INTEGER_TYPE))
8513 /* If the LEN parameter is zero, return DEST. */
8514 if (integer_zerop (len))
8515 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8517 /* We can't compare slen with len as constants below if len is not a
8519 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
/* Determine the source length if the caller did not supply it.  */
8523 slen = c_strlen (src, 1);
8525 /* Now, we must be passed a constant src ptr parameter. */
8526 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
/* Account for the terminating NUL when comparing against LEN.  */
8529 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8531 /* We do not support simplification of this case, though we do
8532 support it when expanding trees into RTL. */
8533 /* FIXME: generate a call to __builtin_memset. */
8534 if (tree_int_cst_lt (slen, len))
8537 /* OK transform into builtin memcpy. */
8538 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8541 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8542 build_call_expr_loc (loc, fn, 3, dest, src, len));
8545 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8546 arguments to the call, and TYPE is its return type.
8547 Return NULL_TREE if no simplification can be made. */
8550 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8552 if (!validate_arg (arg1, POINTER_TYPE)
8553 || !validate_arg (arg2, INTEGER_TYPE)
8554 || !validate_arg (len, INTEGER_TYPE))
/* Folding requires a constant search character and length.  */
8560 if (TREE_CODE (arg2) != INTEGER_CST
8561 || !host_integerp (len, 1))
/* The haystack must be a string constant at least LEN bytes long
   (including the NUL) so host memchr can scan it safely.  */
8564 p1 = c_getstr (arg1);
8565 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
/* Convert ARG2 to the target character set representation.  */
8571 if (target_char_cast (arg2, &c))
8574 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
/* Not found: fold to a null pointer of the argument's type.  */
8577 return build_int_cst (TREE_TYPE (arg1), 0);
/* Found: fold to ARG1 plus the offset of the match.  */
8579 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8581 return fold_convert_loc (loc, type, tem);
8587 /* Fold function call to builtin memcmp with arguments ARG1, ARG2, and LEN.
8588 Return NULL_TREE if no simplification can be made. */
8591 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8593 const char *p1, *p2;
8595 if (!validate_arg (arg1, POINTER_TYPE)
8596 || !validate_arg (arg2, POINTER_TYPE)
8597 || !validate_arg (len, INTEGER_TYPE))
8600 /* If the LEN parameter is zero, return zero. */
8601 if (integer_zerop (len))
8602 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8605 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8606 if (operand_equal_p (arg1, arg2, 0))
8607 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
/* NULL when the argument is not a string constant.  */
8609 p1 = c_getstr (arg1);
8610 p2 = c_getstr (arg2);
8612 /* If all arguments are constant, and the value of len is not greater
8613 than the lengths of arg1 and arg2, evaluate at compile-time. */
8614 if (host_integerp (len, 1) && p1 && p2
8615 && compare_tree_int (len, strlen (p1) + 1) <= 0
8616 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8618 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
/* Normalize the host memcmp result to -1/0/1 so the folded value
   does not depend on the host library.  */
8621 return integer_one_node;
8623 return integer_minus_one_node;
8625 return integer_zero_node;
8628 /* If len parameter is one, return an expression corresponding to
8629 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8630 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8632 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8633 tree cst_uchar_ptr_node
8634 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8637 = fold_convert_loc (loc, integer_type_node,
8638 build1 (INDIRECT_REF, cst_uchar_node,
8639 fold_convert_loc (loc,
8643 = fold_convert_loc (loc, integer_type_node,
8644 build1 (INDIRECT_REF, cst_uchar_node,
8645 fold_convert_loc (loc,
8648 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8654 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8655 Return NULL_TREE if no simplification can be made. */
8658 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8660 const char *p1, *p2;
8662 if (!validate_arg (arg1, POINTER_TYPE)
8663 || !validate_arg (arg2, POINTER_TYPE))
8666 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8667 if (operand_equal_p (arg1, arg2, 0))
8668 return integer_zero_node;
/* NULL when the argument is not a string constant.  */
8670 p1 = c_getstr (arg1);
8671 p2 = c_getstr (arg2);
/* Both constant: evaluate at compile time, normalizing the host
   strcmp result to -1/0/1.  */
8675 const int i = strcmp (p1, p2);
8677 return integer_minus_one_node;
8679 return integer_one_node;
8681 return integer_zero_node;
8684 /* If the second arg is "", return *(const unsigned char*)arg1. */
8685 if (p2 && *p2 == '\0')
8687 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8688 tree cst_uchar_ptr_node
8689 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8691 return fold_convert_loc (loc, integer_type_node,
8692 build1 (INDIRECT_REF, cst_uchar_node,
8693 fold_convert_loc (loc,
8698 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8699 if (p1 && *p1 == '\0')
8701 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8702 tree cst_uchar_ptr_node
8703 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8706 = fold_convert_loc (loc, integer_type_node,
8707 build1 (INDIRECT_REF, cst_uchar_node,
8708 fold_convert_loc (loc,
8711 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8717 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8718 Return NULL_TREE if no simplification can be made. */
8721 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8723 const char *p1, *p2;
8725 if (!validate_arg (arg1, POINTER_TYPE)
8726 || !validate_arg (arg2, POINTER_TYPE)
8727 || !validate_arg (len, INTEGER_TYPE))
8730 /* If the LEN parameter is zero, return zero. */
8731 if (integer_zerop (len))
8732 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8735 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8736 if (operand_equal_p (arg1, arg2, 0))
8737 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
/* NULL when the argument is not a string constant.  */
8739 p1 = c_getstr (arg1);
8740 p2 = c_getstr (arg2);
/* All constant: evaluate at compile time, normalizing the host
   strncmp result to -1/0/1.  */
8742 if (host_integerp (len, 1) && p1 && p2)
8744 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8746 return integer_one_node;
8748 return integer_minus_one_node;
8750 return integer_zero_node;
8753 /* If the second arg is "", and the length is greater than zero,
8754 return *(const unsigned char*)arg1. */
8755 if (p2 && *p2 == '\0'
8756 && TREE_CODE (len) == INTEGER_CST
8757 && tree_int_cst_sgn (len) == 1)
8759 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8760 tree cst_uchar_ptr_node
8761 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8763 return fold_convert_loc (loc, integer_type_node,
8764 build1 (INDIRECT_REF, cst_uchar_node,
8765 fold_convert_loc (loc,
8770 /* If the first arg is "", and the length is greater than zero,
8771 return -*(const unsigned char*)arg2. */
8772 if (p1 && *p1 == '\0'
8773 && TREE_CODE (len) == INTEGER_CST
8774 && tree_int_cst_sgn (len) == 1)
8776 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8777 tree cst_uchar_ptr_node
8778 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8780 tree temp = fold_convert_loc (loc, integer_type_node,
8781 build1 (INDIRECT_REF, cst_uchar_node,
8782 fold_convert_loc (loc,
8785 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8788 /* If len parameter is one, return an expression corresponding to
8789 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8790 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8792 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8793 tree cst_uchar_ptr_node
8794 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8796 tree ind1 = fold_convert_loc (loc, integer_type_node,
8797 build1 (INDIRECT_REF, cst_uchar_node,
8798 fold_convert_loc (loc,
8801 tree ind2 = fold_convert_loc (loc, integer_type_node,
8802 build1 (INDIRECT_REF, cst_uchar_node,
8803 fold_convert_loc (loc,
8806 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8812 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8813 ARG. Return NULL_TREE if no simplification can be made. */
8816 fold_builtin_signbit (location_t loc, tree arg, tree type)
8820 if (!validate_arg (arg, REAL_TYPE))
8823 /* If ARG is a compile-time constant, determine the result. */
8824 if (TREE_CODE (arg) == REAL_CST
8825 && !TREE_OVERFLOW (arg))
8829 c = TREE_REAL_CST (arg);
8830 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8831 return fold_convert_loc (loc, type, temp);
8834 /* If ARG is non-negative, the result is always zero. */
8835 if (tree_expr_nonnegative_p (arg))
8836 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8838 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
/* With signed zeros, -0.0 < 0.0 is false but signbit(-0.0) is set,
   so this rewrite is only valid when the format lacks them.  */
8839 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8840 return fold_build2_loc (loc, LT_EXPR, type, arg,
8841 build_real (TREE_TYPE (arg), dconst0));
8846 /* Fold function call to builtin copysign, copysignf or copysignl with
8847 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8851 fold_builtin_copysign (location_t loc, tree fndecl,
8852 tree arg1, tree arg2, tree type)
8856 if (!validate_arg (arg1, REAL_TYPE)
8857 || !validate_arg (arg2, REAL_TYPE))
8860 /* copysign(X,X) is X. */
8861 if (operand_equal_p (arg1, arg2, 0))
8862 return fold_convert_loc (loc, type, arg1);
8864 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8865 if (TREE_CODE (arg1) == REAL_CST
8866 && TREE_CODE (arg2) == REAL_CST
8867 && !TREE_OVERFLOW (arg1)
8868 && !TREE_OVERFLOW (arg2))
8870 REAL_VALUE_TYPE c1, c2;
8872 c1 = TREE_REAL_CST (arg1);
8873 c2 = TREE_REAL_CST (arg2);
8874 /* c1.sign := c2.sign. */
8875 real_copysign (&c1, &c2);
8876 return build_real (type, c1);
8879 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8880 Remember to evaluate Y for side-effects. */
8881 if (tree_expr_nonnegative_p (arg2))
8882 return omit_one_operand_loc (loc, type,
8883 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8886 /* Strip sign changing operations for the first argument. */
/* copysign overwrites ARG1's sign anyway, so negations/fabs on it
   are dead and can be removed.  */
8887 tem = fold_strip_sign_ops (arg1);
8889 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8894 /* Fold a call to builtin isascii with argument ARG. */
8897 fold_builtin_isascii (location_t loc, tree arg)
8899 if (!validate_arg (arg, INTEGER_TYPE))
8903 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
/* Any bit above the low 7 means the value is outside ASCII.  */
8904 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8905 build_int_cst (NULL_TREE,
8906 ~ (unsigned HOST_WIDE_INT) 0x7f));
8907 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8908 arg, integer_zero_node);
8912 /* Fold a call to builtin toascii with argument ARG. */
8915 fold_builtin_toascii (location_t loc, tree arg)
8917 if (!validate_arg (arg, INTEGER_TYPE))
8920 /* Transform toascii(c) -> (c & 0x7f). */
8921 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8922 build_int_cst (NULL_TREE, 0x7f));
8925 /* Fold a call to builtin isdigit with argument ARG. */
8928 fold_builtin_isdigit (location_t loc, tree arg)
8930 if (!validate_arg (arg, INTEGER_TYPE))
8934 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8935 /* According to the C standard, isdigit is unaffected by locale.
8936 However, it definitely is affected by the target character set. */
8937 unsigned HOST_WIDE_INT target_digit0
8938 = lang_hooks.to_target_charset ('0');
/* to_target_charset returning 0 means the conversion failed.  */
8940 if (target_digit0 == 0)
/* The unsigned subtraction makes values below '0' wrap to large
   numbers, so a single <= 9 comparison covers both bounds.  */
8943 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8944 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8945 build_int_cst (unsigned_type_node, target_digit0));
8946 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8947 build_int_cst (unsigned_type_node, 9));
8951 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8954 fold_builtin_fabs (location_t loc, tree arg, tree type)
8956 if (!validate_arg (arg, REAL_TYPE))
8959 arg = fold_convert_loc (loc, type, arg);
/* A constant argument folds to a constant; anything else becomes
   an ABS_EXPR.  */
8960 if (TREE_CODE (arg) == REAL_CST)
8961 return fold_abs_const (arg, type);
8962 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8965 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8968 fold_builtin_abs (location_t loc, tree arg, tree type)
8970 if (!validate_arg (arg, INTEGER_TYPE))
8973 arg = fold_convert_loc (loc, type, arg);
/* Mirror of fold_builtin_fabs for the integer abs family.  */
8974 if (TREE_CODE (arg) == INTEGER_CST)
8975 return fold_abs_const (arg, type);
8976 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8979 /* Fold a call to builtin fmin or fmax. */
8982 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8983 tree type, bool max)
8985 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8987 /* Calculate the result when the argument is a constant. */
8988 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8993 /* If either argument is NaN, return the other one. Avoid the
8994 transformation if we get (and honor) a signalling NaN. Using
8995 omit_one_operand() ensures we create a non-lvalue. */
8996 if (TREE_CODE (arg0) == REAL_CST
8997 && real_isnan (&TREE_REAL_CST (arg0))
8998 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8999 || ! TREE_REAL_CST (arg0).signalling))
9000 return omit_one_operand_loc (loc, type, arg1, arg0);
9001 if (TREE_CODE (arg1) == REAL_CST
9002 && real_isnan (&TREE_REAL_CST (arg1))
9003 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9004 || ! TREE_REAL_CST (arg1).signalling))
9005 return omit_one_operand_loc (loc, type, arg0, arg1);
9007 /* Transform fmin/fmax(x,x) -> x. */
9008 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9009 return omit_one_operand_loc (loc, type, arg0, arg1);
9011 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9012 functions to return the numeric arg if the other one is NaN.
9013 These tree codes don't honor that, so only transform if
9014 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9015 handled, so we don't have to worry about it either. */
9016 if (flag_finite_math_only)
9017 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9018 fold_convert_loc (loc, type, arg0),
9019 fold_convert_loc (loc, type, arg1));
9024 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9027 fold_builtin_carg (location_t loc, tree arg, tree type)
9029 if (validate_arg (arg, COMPLEX_TYPE)
9030 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
/* Look up the atan2 builtin matching the result type; TYPE is the
   real component type of the complex argument.  */
9032 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
/* ARG is used twice (real and imaginary part); save it so it is
   only evaluated once.  */
9036 tree new_arg = builtin_save_expr (arg);
9037 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9038 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
/* carg(a+bi) == atan2(b, a) — imaginary part first.  */
9039 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9046 /* Fold a call to builtin logb/ilogb. */
9049 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9051 if (! validate_arg (arg, REAL_TYPE))
/* Only a constant, non-overflowed argument can be folded.  */
9056 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9058 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9064 /* If arg is Inf or NaN and we're logb, return it. */
/* RETTYPE distinguishes logb (real return) from ilogb (int return).  */
9065 if (TREE_CODE (rettype) == REAL_TYPE)
9066 return fold_convert_loc (loc, rettype, arg);
9067 /* Fall through... */
9069 /* Zero may set errno and/or raise an exception for logb, also
9070 for ilogb we don't know FP_ILOGB0. */
9073 /* For normal numbers, proceed iff radix == 2. In GCC,
9074 normalized significands are in the range [0.5, 1.0). We
9075 want the exponent as if they were [1.0, 2.0) so get the
9076 exponent and subtract 1. */
9077 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9078 return fold_convert_loc (loc, rettype,
9079 build_int_cst (NULL_TREE,
9080 REAL_EXP (value)-1));
9088 /* Fold a call to builtin significand, if radix == 2. */
9091 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9093 if (! validate_arg (arg, REAL_TYPE))
/* Only a constant, non-overflowed argument can be folded.  */
9098 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9100 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9107 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9108 return fold_convert_loc (loc, rettype, arg);
9110 /* For normal numbers, proceed iff radix == 2. */
9111 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9113 REAL_VALUE_TYPE result = *value;
9114 /* In GCC, normalized significands are in the range [0.5,
9115 1.0). We want them to be [1.0, 2.0) so set the
9117 SET_REAL_EXP (&result, 1);
9118 return build_real (rettype, result);
9127 /* Fold a call to builtin frexp, we can assume the base is 2. */
9130 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9132 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
/* Only a constant, non-overflowed value can be folded.  */
9137 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
/* ARG1 is the int* out-parameter; turn it into the lvalue *arg1.  */
9140 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9142 /* Proceed if a valid pointer type was passed in. */
9143 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9145 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9151 /* For +-0, return (*exp = 0, +-0). */
9152 exp = integer_zero_node;
9157 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9158 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9161 /* Since the frexp function always expects base 2, and in
9162 GCC normalized significands are already in the range
9163 [0.5, 1.0), we have exactly what frexp wants. */
9164 REAL_VALUE_TYPE frac_rvt = *value;
9165 SET_REAL_EXP (&frac_rvt, 0);
9166 frac = build_real (rettype, frac_rvt);
9167 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9174 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9175 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
/* Force the store to *arg1 to survive subsequent folding.  */
9176 TREE_SIDE_EFFECTS (arg1) = 1;
9177 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9183 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9184 then we can assume the base is two. If it's false, then we have to
9185 check the mode of the TYPE parameter in certain cases. */
9188 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9189 tree type, bool ldexp)
9191 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9196 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9197 if (real_zerop (arg0) || integer_zerop (arg1)
9198 || (TREE_CODE (arg0) == REAL_CST
9199 && !real_isfinite (&TREE_REAL_CST (arg0))))
9200 return omit_one_operand_loc (loc, type, arg0, arg1);
9202 /* If both arguments are constant, then try to evaluate it. */
/* scalbn scales by the mode's radix, so for !LDEXP this constant
   evaluation is only valid when that radix is 2.  */
9203 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9204 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9205 && host_integerp (arg1, 0))
9207 /* Bound the maximum adjustment to twice the range of the
9208 mode's valid exponents. Use abs to ensure the range is
9209 positive as a sanity check. */
9210 const long max_exp_adj = 2 *
9211 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9212 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9214 /* Get the user-requested adjustment. */
9215 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9217 /* The requested adjustment must be inside this range. This
9218 is a preliminary cap to avoid things like overflow, we
9219 may still fail to compute the result for other reasons. */
9220 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9222 REAL_VALUE_TYPE initial_result;
9224 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9226 /* Ensure we didn't overflow. */
9227 if (! real_isinf (&initial_result))
9229 const REAL_VALUE_TYPE trunc_result
9230 = real_value_truncate (TYPE_MODE (type), initial_result);
9232 /* Only proceed if the target mode can hold the
/* The fold is abandoned if truncating to the target mode would
   change the value.  */
9234 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9235 return build_real (type, trunc_result);
9244 /* Fold a call to builtin modf. */
/* Attempts a compile-time fold of modf (ARG0, ARG1) at LOC: ARG0 must
   be a REAL_CST without overflow and ARG1 a pointer whose pointed-to
   type matches RETTYPE.  Builds a COMPOUND_EXPR storing the integral
   part through ARG1 and yielding the fractional part.
   NOTE(review): early returns and the switch on value->cl are elided
   in this excerpt.  */
9247 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9249 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9254 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9257 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9259 /* Proceed if a valid pointer type was passed in. */
9260 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9262 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9263 REAL_VALUE_TYPE trunc, frac;
9269 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9270 trunc = frac = *value;
9273 /* For +-Inf, return (*arg1 = arg0, +-0). */
/* Presumably frac was set to zero in elided code; only its sign is
   copied here — TODO confirm against the full source.  */
9275 frac.sign = value->sign;
9279 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9280 real_trunc (&trunc, VOIDmode, value);
9281 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9282 /* If the original number was negative and already
9283 integral, then the fractional part is -0.0. */
9284 if (value->sign && frac.cl == rvc_zero)
9285 frac.sign = value->sign;
9289 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9290 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9291 build_real (rettype, trunc));
/* Keep the store through ARG1 from being optimized away.  */
9292 TREE_SIDE_EFFECTS (arg1) = 1;
9293 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9294 build_real (rettype, frac));
9300 /* Given a location LOC, an interclass builtin function decl FNDECL
9301 and its single argument ARG, return an folded expression computing
9302 the same, or NULL_TREE if we either couldn't or didn't want to fold
9303 (the latter happen if there's an RTL instruction available). */
/* NOTE(review): this excerpt elides local declarations (buf, r,
   result), some braces and returns; comments below only describe the
   visible lines.  */
9306 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9308 enum machine_mode mode;
9310 if (!validate_arg (arg, REAL_TYPE))
/* Prefer the RTL expander when an instruction pattern exists.  */
9313 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9316 mode = TYPE_MODE (TREE_TYPE (arg));
9318 /* If there is no optab, try generic code. */
9319 switch (DECL_FUNCTION_CODE (fndecl))
9323 CASE_FLT_FN (BUILT_IN_ISINF):
9325 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9326 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9327 tree const type = TREE_TYPE (arg);
/* get_max_float writes the mode's largest finite value into buf as a
   hex-float string, then it is parsed back into r.  */
9331 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9332 real_from_string (&r, buf);
9333 result = build_call_expr (isgr_fn, 2,
9334 fold_build1_loc (loc, ABS_EXPR, type, arg),
9335 build_real (type, r));
9338 CASE_FLT_FN (BUILT_IN_FINITE):
9339 case BUILT_IN_ISFINITE:
9341 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9342 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9343 tree const type = TREE_TYPE (arg);
9347 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9348 real_from_string (&r, buf);
9349 result = build_call_expr (isle_fn, 2,
9350 fold_build1_loc (loc, ABS_EXPR, type, arg),
9351 build_real (type, r));
/* The commented-out code below is an alternative formulation kept in
   the original source; it is not compiled.  */
9352 /*result = fold_build2_loc (loc, UNGT_EXPR,
9353 TREE_TYPE (TREE_TYPE (fndecl)),
9354 fold_build1_loc (loc, ABS_EXPR, type, arg),
9355 build_real (type, r));
9356 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9357 TREE_TYPE (TREE_TYPE (fndecl)),
9361 case BUILT_IN_ISNORMAL:
9363 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9364 islessequal(fabs(x),DBL_MAX). */
9365 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9366 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9367 tree const type = TREE_TYPE (arg);
9368 REAL_VALUE_TYPE rmax, rmin;
9371 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9372 real_from_string (&rmax, buf);
/* 0x1p(emin-1) is the smallest positive normal number of MODE.  */
9373 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9374 real_from_string (&rmin, buf);
/* Save |arg| once since it is used in both comparisons below.  */
9375 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9376 result = build_call_expr (isle_fn, 2, arg,
9377 build_real (type, rmax));
9378 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9379 build_call_expr (isge_fn, 2, arg,
9380 build_real (type, rmin)));
9390 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9391 ARG is the argument for the call. */
/* BUILTIN_INDEX selects which classification is folded (BUILT_IN_ISINF,
   BUILT_IN_ISINF_SIGN, BUILT_IN_ISFINITE or BUILT_IN_ISNAN).  Returns
   the folded tree; NOTE(review): the NULL_TREE fallthroughs, braces
   and the declaration of r are elided in this excerpt.  */
9394 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9396 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9399 if (!validate_arg (arg, REAL_TYPE))
9402 switch (builtin_index)
9404 case BUILT_IN_ISINF:
/* When the mode has no infinities the answer is a constant 0; keep
   ARG alive for its side effects only.  */
9405 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9406 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9408 if (TREE_CODE (arg) == REAL_CST)
9410 r = TREE_REAL_CST (arg);
9411 if (real_isinf (&r))
9412 return real_compare (GT_EXPR, &r, &dconst0)
9413 ? integer_one_node : integer_minus_one_node;
9415 return integer_zero_node;
9420 case BUILT_IN_ISINF_SIGN:
9422 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9423 /* In a boolean context, GCC will fold the inner COND_EXPR to
9424 1. So e.g. "if (isinf_sign(x))" would be folded to just
9425 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9426 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9427 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9428 tree tmp = NULL_TREE;
/* ARG is evaluated twice (signbit and isinf); save it once.  */
9430 arg = builtin_save_expr (arg);
9432 if (signbit_fn && isinf_fn)
9434 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9435 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
/* Normalize both calls to 0/1 booleans before combining.  */
9437 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9438 signbit_call, integer_zero_node);
9439 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9440 isinf_call, integer_zero_node);
9442 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9443 integer_minus_one_node, integer_one_node);
9444 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9452 case BUILT_IN_ISFINITE:
9453 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9454 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9455 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9457 if (TREE_CODE (arg) == REAL_CST)
9459 r = TREE_REAL_CST (arg);
9460 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9465 case BUILT_IN_ISNAN:
9466 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9467 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9469 if (TREE_CODE (arg) == REAL_CST)
9471 r = TREE_REAL_CST (arg);
9472 return real_isnan (&r) ? integer_one_node : integer_zero_node;
/* Non-constant NaN test: x != x, i.e. an UNORDERED comparison of ARG
   with itself.  */
9475 arg = builtin_save_expr (arg);
9476 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9483 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9484 This builtin will generate code to return the appropriate floating
9485 point classification depending on the value of the floating point
9486 number passed in. The possible return values must be supplied as
9487 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9488 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9489 one floating point argument which is "type generic". */
/* NOTE(review): local declarations of buf and r, the final return and
   some brace lines are elided in this excerpt.  */
9492 fold_builtin_fpclassify (location_t loc, tree exp)
9494 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9495 arg, type, res, tmp;
9496 enum machine_mode mode;
9500 /* Verify the required arguments in the original call. */
9501 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9502 INTEGER_TYPE, INTEGER_TYPE,
9503 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
/* Pull out the five classification result values and the tested
   floating-point argument, in the documented order.  */
9506 fp_nan = CALL_EXPR_ARG (exp, 0);
9507 fp_infinite = CALL_EXPR_ARG (exp, 1);
9508 fp_normal = CALL_EXPR_ARG (exp, 2);
9509 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9510 fp_zero = CALL_EXPR_ARG (exp, 4);
9511 arg = CALL_EXPR_ARG (exp, 5);
9512 type = TREE_TYPE (arg);
9513 mode = TYPE_MODE (type);
/* All tests below are on |arg|; evaluate it exactly once.  */
9514 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
/* The classification nest is built inside-out, starting from the
   innermost zero/subnormal test:  */
9518 (fabs(x) == Inf ? FP_INFINITE :
9519 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9520 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9522 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9523 build_real (type, dconst0));
9524 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9525 tmp, fp_zero, fp_subnormal);
/* 0x1p(emin-1) is the smallest positive normal value of MODE.  */
9527 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9528 real_from_string (&r, buf);
9529 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9530 arg, build_real (type, r));
9531 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
/* Only test for infinity / NaN when the mode honors them.  */
9533 if (HONOR_INFINITIES (mode))
9536 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9537 build_real (type, r));
9538 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9542 if (HONOR_NANS (mode))
9544 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9545 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9551 /* Fold a call to an unordered comparison function such as
9552 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9553 being called and ARG0 and ARG1 are the arguments for the call.
9554 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9555 the opposite of the desired result. UNORDERED_CODE is used
9556 for modes that can hold NaNs and ORDERED_CODE is used for
/* (tail of the comment above, the declarations of type0/type1 and a
   few branch bodies are elided in this excerpt — NOTE(review).)  */
9560 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9561 enum tree_code unordered_code,
9562 enum tree_code ordered_code)
9564 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9565 enum tree_code code;
9567 enum tree_code code0, code1;
9568 tree cmp_type = NULL_TREE;
9570 type0 = TREE_TYPE (arg0);
9571 type1 = TREE_TYPE (arg1);
9573 code0 = TREE_CODE (type0);
9574 code1 = TREE_CODE (type1);
/* Pick a common comparison type for the two operands.  */
9576 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9577 /* Choose the wider of two real types. */
9578 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9580 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9582 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9585 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9586 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9588 if (unordered_code == UNORDERED_EXPR)
/* isunordered is always false when NaNs are not honored.  */
9590 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9591 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9592 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
/* Build the inverse comparison and negate it, so e.g. isgreater
   becomes !(a unle b) when NaNs are honored.  */
9595 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9597 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9598 fold_build2_loc (loc, code, type, arg0, arg1));
9601 /* Fold a call to built-in function FNDECL with 0 arguments.
9602 IGNORE is true if the result of the function call is ignored. This
9603 function returns NULL_TREE if no simplification was possible. */
/* NOTE(review): the switch header, default case and final return are
   elided in this excerpt.  */
9606 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9608 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9609 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* inf()/infd* fold to a warning-eligible infinity constant ...  */
9612 CASE_FLT_FN (BUILT_IN_INF):
9613 case BUILT_IN_INFD32:
9614 case BUILT_IN_INFD64:
9615 case BUILT_IN_INFD128:
9616 return fold_builtin_inf (loc, type, true);
/* ... while HUGE_VAL folds without the pedantic warning flag.  */
9618 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9619 return fold_builtin_inf (loc, type, false);
9621 case BUILT_IN_CLASSIFY_TYPE:
9622 return fold_builtin_classify_type (NULL_TREE);
9630 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9631 IGNORE is true if the result of the function call is ignored. This
9632 function returns NULL_TREE if no simplification was possible. */
/* Central one-argument dispatch: each case either delegates to a
   dedicated fold_builtin_* helper or calls an MPFR/MPC constant
   evaluator after validating ARG0's type.  NOTE(review): the switch
   header, many `break;` lines, the default case and the final return
   are elided in this excerpt.  */
9635 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9637 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9638 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9642 case BUILT_IN_CONSTANT_P:
9644 tree val = fold_builtin_constant_p (arg0);
9646 /* Gimplification will pull the CALL_EXPR for the builtin out of
9647 an if condition. When not optimizing, we'll not CSE it back.
9648 To avoid link error types of regressions, return false now. */
9649 if (!val && !optimize)
9650 val = integer_zero_node;
9655 case BUILT_IN_CLASSIFY_TYPE:
9656 return fold_builtin_classify_type (arg0);
9658 case BUILT_IN_STRLEN:
9659 return fold_builtin_strlen (loc, arg0);
9661 CASE_FLT_FN (BUILT_IN_FABS):
9662 return fold_builtin_fabs (loc, arg0, type);
9666 case BUILT_IN_LLABS:
9667 case BUILT_IN_IMAXABS:
9668 return fold_builtin_abs (loc, arg0, type);
/* Complex-valued builtins: each validates a complex argument with a
   real component type, then folds via tree codes or MPC.  */
9670 CASE_FLT_FN (BUILT_IN_CONJ):
9671 if (validate_arg (arg0, COMPLEX_TYPE)
9672 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9673 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9676 CASE_FLT_FN (BUILT_IN_CREAL):
9677 if (validate_arg (arg0, COMPLEX_TYPE)
9678 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9679 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9682 CASE_FLT_FN (BUILT_IN_CIMAG):
9683 if (validate_arg (arg0, COMPLEX_TYPE)
9684 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9685 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9688 CASE_FLT_FN (BUILT_IN_CCOS):
9689 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9691 CASE_FLT_FN (BUILT_IN_CCOSH):
9692 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9694 CASE_FLT_FN (BUILT_IN_CSIN):
9695 if (validate_arg (arg0, COMPLEX_TYPE)
9696 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9697 return do_mpc_arg1 (arg0, type, mpc_sin);
9700 CASE_FLT_FN (BUILT_IN_CSINH):
9701 if (validate_arg (arg0, COMPLEX_TYPE)
9702 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9703 return do_mpc_arg1 (arg0, type, mpc_sinh);
9706 CASE_FLT_FN (BUILT_IN_CTAN):
9707 if (validate_arg (arg0, COMPLEX_TYPE)
9708 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9709 return do_mpc_arg1 (arg0, type, mpc_tan);
9712 CASE_FLT_FN (BUILT_IN_CTANH):
9713 if (validate_arg (arg0, COMPLEX_TYPE)
9714 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9715 return do_mpc_arg1 (arg0, type, mpc_tanh);
9718 CASE_FLT_FN (BUILT_IN_CLOG):
9719 if (validate_arg (arg0, COMPLEX_TYPE)
9720 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9721 return do_mpc_arg1 (arg0, type, mpc_log);
9724 CASE_FLT_FN (BUILT_IN_CSQRT):
9725 if (validate_arg (arg0, COMPLEX_TYPE)
9726 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9727 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9730 CASE_FLT_FN (BUILT_IN_CASIN):
9731 if (validate_arg (arg0, COMPLEX_TYPE)
9732 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9733 return do_mpc_arg1 (arg0, type, mpc_asin);
9736 CASE_FLT_FN (BUILT_IN_CACOS):
9737 if (validate_arg (arg0, COMPLEX_TYPE)
9738 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9739 return do_mpc_arg1 (arg0, type, mpc_acos);
9742 CASE_FLT_FN (BUILT_IN_CATAN):
9743 if (validate_arg (arg0, COMPLEX_TYPE)
9744 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9745 return do_mpc_arg1 (arg0, type, mpc_atan);
9748 CASE_FLT_FN (BUILT_IN_CASINH):
9749 if (validate_arg (arg0, COMPLEX_TYPE)
9750 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9751 return do_mpc_arg1 (arg0, type, mpc_asinh);
9754 CASE_FLT_FN (BUILT_IN_CACOSH):
9755 if (validate_arg (arg0, COMPLEX_TYPE)
9756 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9757 return do_mpc_arg1 (arg0, type, mpc_acosh);
9760 CASE_FLT_FN (BUILT_IN_CATANH):
9761 if (validate_arg (arg0, COMPLEX_TYPE)
9762 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9763 return do_mpc_arg1 (arg0, type, mpc_atanh);
9766 CASE_FLT_FN (BUILT_IN_CABS):
9767 return fold_builtin_cabs (loc, arg0, type, fndecl);
9769 CASE_FLT_FN (BUILT_IN_CARG):
9770 return fold_builtin_carg (loc, arg0, type);
/* Real-valued math builtins: MPFR evaluators take optional lower and
   upper domain bounds plus an inclusivity flag.  */
9772 CASE_FLT_FN (BUILT_IN_SQRT):
9773 return fold_builtin_sqrt (loc, arg0, type);
9775 CASE_FLT_FN (BUILT_IN_CBRT):
9776 return fold_builtin_cbrt (loc, arg0, type);
9778 CASE_FLT_FN (BUILT_IN_ASIN):
9779 if (validate_arg (arg0, REAL_TYPE))
9780 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9781 &dconstm1, &dconst1, true);
9784 CASE_FLT_FN (BUILT_IN_ACOS):
9785 if (validate_arg (arg0, REAL_TYPE))
9786 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9787 &dconstm1, &dconst1, true);
9790 CASE_FLT_FN (BUILT_IN_ATAN):
9791 if (validate_arg (arg0, REAL_TYPE))
9792 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9795 CASE_FLT_FN (BUILT_IN_ASINH):
9796 if (validate_arg (arg0, REAL_TYPE))
9797 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9800 CASE_FLT_FN (BUILT_IN_ACOSH):
9801 if (validate_arg (arg0, REAL_TYPE))
9802 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9803 &dconst1, NULL, true);
9806 CASE_FLT_FN (BUILT_IN_ATANH):
9807 if (validate_arg (arg0, REAL_TYPE))
9808 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9809 &dconstm1, &dconst1, false);
9812 CASE_FLT_FN (BUILT_IN_SIN):
9813 if (validate_arg (arg0, REAL_TYPE))
9814 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9817 CASE_FLT_FN (BUILT_IN_COS):
9818 return fold_builtin_cos (loc, arg0, type, fndecl);
9820 CASE_FLT_FN (BUILT_IN_TAN):
9821 return fold_builtin_tan (arg0, type);
9823 CASE_FLT_FN (BUILT_IN_CEXP):
9824 return fold_builtin_cexp (loc, arg0, type);
9826 CASE_FLT_FN (BUILT_IN_CEXPI):
9827 if (validate_arg (arg0, REAL_TYPE))
9828 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9831 CASE_FLT_FN (BUILT_IN_SINH):
9832 if (validate_arg (arg0, REAL_TYPE))
9833 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9836 CASE_FLT_FN (BUILT_IN_COSH):
9837 return fold_builtin_cosh (loc, arg0, type, fndecl);
9839 CASE_FLT_FN (BUILT_IN_TANH):
9840 if (validate_arg (arg0, REAL_TYPE))
9841 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9844 CASE_FLT_FN (BUILT_IN_ERF):
9845 if (validate_arg (arg0, REAL_TYPE))
9846 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9849 CASE_FLT_FN (BUILT_IN_ERFC):
9850 if (validate_arg (arg0, REAL_TYPE))
9851 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9854 CASE_FLT_FN (BUILT_IN_TGAMMA):
9855 if (validate_arg (arg0, REAL_TYPE))
9856 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9859 CASE_FLT_FN (BUILT_IN_EXP):
9860 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9862 CASE_FLT_FN (BUILT_IN_EXP2):
9863 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9865 CASE_FLT_FN (BUILT_IN_EXP10):
9866 CASE_FLT_FN (BUILT_IN_POW10):
9867 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9869 CASE_FLT_FN (BUILT_IN_EXPM1):
9870 if (validate_arg (arg0, REAL_TYPE))
9871 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9874 CASE_FLT_FN (BUILT_IN_LOG):
9875 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9877 CASE_FLT_FN (BUILT_IN_LOG2):
9878 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9880 CASE_FLT_FN (BUILT_IN_LOG10):
9881 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9883 CASE_FLT_FN (BUILT_IN_LOG1P):
9884 if (validate_arg (arg0, REAL_TYPE))
9885 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9886 &dconstm1, NULL, false);
9889 CASE_FLT_FN (BUILT_IN_J0):
9890 if (validate_arg (arg0, REAL_TYPE))
9891 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9895 CASE_FLT_FN (BUILT_IN_J1):
9896 if (validate_arg (arg0, REAL_TYPE))
9897 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9901 CASE_FLT_FN (BUILT_IN_Y0):
9902 if (validate_arg (arg0, REAL_TYPE))
9903 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9904 &dconst0, NULL, false);
9907 CASE_FLT_FN (BUILT_IN_Y1):
9908 if (validate_arg (arg0, REAL_TYPE))
9909 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9910 &dconst0, NULL, false);
9913 CASE_FLT_FN (BUILT_IN_NAN):
9914 case BUILT_IN_NAND32:
9915 case BUILT_IN_NAND64:
9916 case BUILT_IN_NAND128:
9917 return fold_builtin_nan (arg0, type, true);
9919 CASE_FLT_FN (BUILT_IN_NANS):
9920 return fold_builtin_nan (arg0, type, false);
/* Rounding, bit-manipulation and classification builtins.  */
9922 CASE_FLT_FN (BUILT_IN_FLOOR):
9923 return fold_builtin_floor (loc, fndecl, arg0);
9925 CASE_FLT_FN (BUILT_IN_CEIL):
9926 return fold_builtin_ceil (loc, fndecl, arg0);
9928 CASE_FLT_FN (BUILT_IN_TRUNC):
9929 return fold_builtin_trunc (loc, fndecl, arg0);
9931 CASE_FLT_FN (BUILT_IN_ROUND):
9932 return fold_builtin_round (loc, fndecl, arg0);
9934 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9935 CASE_FLT_FN (BUILT_IN_RINT):
9936 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9938 CASE_FLT_FN (BUILT_IN_LCEIL):
9939 CASE_FLT_FN (BUILT_IN_LLCEIL):
9940 CASE_FLT_FN (BUILT_IN_LFLOOR):
9941 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9942 CASE_FLT_FN (BUILT_IN_LROUND):
9943 CASE_FLT_FN (BUILT_IN_LLROUND):
9944 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9946 CASE_FLT_FN (BUILT_IN_LRINT):
9947 CASE_FLT_FN (BUILT_IN_LLRINT):
9948 return fold_fixed_mathfn (loc, fndecl, arg0);
9950 case BUILT_IN_BSWAP32:
9951 case BUILT_IN_BSWAP64:
9952 return fold_builtin_bswap (fndecl, arg0);
9954 CASE_INT_FN (BUILT_IN_FFS):
9955 CASE_INT_FN (BUILT_IN_CLZ):
9956 CASE_INT_FN (BUILT_IN_CTZ):
9957 CASE_INT_FN (BUILT_IN_POPCOUNT):
9958 CASE_INT_FN (BUILT_IN_PARITY):
9959 return fold_builtin_bitop (fndecl, arg0);
9961 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9962 return fold_builtin_signbit (loc, arg0, type);
9964 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9965 return fold_builtin_significand (loc, arg0, type);
9967 CASE_FLT_FN (BUILT_IN_ILOGB):
9968 CASE_FLT_FN (BUILT_IN_LOGB):
9969 return fold_builtin_logb (loc, arg0, type);
9971 case BUILT_IN_ISASCII:
9972 return fold_builtin_isascii (loc, arg0);
9974 case BUILT_IN_TOASCII:
9975 return fold_builtin_toascii (loc, arg0);
9977 case BUILT_IN_ISDIGIT:
9978 return fold_builtin_isdigit (loc, arg0);
/* For the classification builtins, try the constant/tree fold first,
   then fall back to the interclass expansion.  */
9980 CASE_FLT_FN (BUILT_IN_FINITE):
9981 case BUILT_IN_FINITED32:
9982 case BUILT_IN_FINITED64:
9983 case BUILT_IN_FINITED128:
9984 case BUILT_IN_ISFINITE:
9986 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9989 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9992 CASE_FLT_FN (BUILT_IN_ISINF):
9993 case BUILT_IN_ISINFD32:
9994 case BUILT_IN_ISINFD64:
9995 case BUILT_IN_ISINFD128:
9997 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10000 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10003 case BUILT_IN_ISNORMAL:
10004 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10006 case BUILT_IN_ISINF_SIGN:
10007 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10009 CASE_FLT_FN (BUILT_IN_ISNAN):
10010 case BUILT_IN_ISNAND32:
10011 case BUILT_IN_ISNAND64:
10012 case BUILT_IN_ISNAND128:
10013 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10015 case BUILT_IN_PRINTF:
10016 case BUILT_IN_PRINTF_UNLOCKED:
10017 case BUILT_IN_VPRINTF:
10018 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10028 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10029 IGNORE is true if the result of the function call is ignored. This
10030 function returns NULL_TREE if no simplification was possible. */
/* Two-argument dispatch: math builtins go to MPFR/MPC constant
   evaluators or dedicated folders; string builtins go to their
   fold_builtin_str* helpers.  NOTE(review): the switch header, break
   statements, default case and final return are elided here.  */
10033 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10035 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10036 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10040 CASE_FLT_FN (BUILT_IN_JN):
10041 if (validate_arg (arg0, INTEGER_TYPE)
10042 && validate_arg (arg1, REAL_TYPE))
10043 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10046 CASE_FLT_FN (BUILT_IN_YN):
10047 if (validate_arg (arg0, INTEGER_TYPE)
10048 && validate_arg (arg1, REAL_TYPE))
10049 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10053 CASE_FLT_FN (BUILT_IN_DREM):
10054 CASE_FLT_FN (BUILT_IN_REMAINDER):
10055 if (validate_arg (arg0, REAL_TYPE)
10056 && validate_arg(arg1, REAL_TYPE))
10057 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10060 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10061 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10062 if (validate_arg (arg0, REAL_TYPE)
10063 && validate_arg(arg1, POINTER_TYPE))
10064 return do_mpfr_lgamma_r (arg0, arg1, type);
10067 CASE_FLT_FN (BUILT_IN_ATAN2):
10068 if (validate_arg (arg0, REAL_TYPE)
10069 && validate_arg(arg1, REAL_TYPE))
10070 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10073 CASE_FLT_FN (BUILT_IN_FDIM):
10074 if (validate_arg (arg0, REAL_TYPE)
10075 && validate_arg(arg1, REAL_TYPE))
10076 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10079 CASE_FLT_FN (BUILT_IN_HYPOT):
10080 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10082 CASE_FLT_FN (BUILT_IN_CPOW):
10083 if (validate_arg (arg0, COMPLEX_TYPE)
10084 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10085 && validate_arg (arg1, COMPLEX_TYPE)
10086 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10087 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
/* ldexp can assume radix 2; scalbn/scalbln must check the mode.  */
10090 CASE_FLT_FN (BUILT_IN_LDEXP):
10091 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10092 CASE_FLT_FN (BUILT_IN_SCALBN):
10093 CASE_FLT_FN (BUILT_IN_SCALBLN):
10094 return fold_builtin_load_exponent (loc, arg0, arg1,
10095 type, /*ldexp=*/false);
10097 CASE_FLT_FN (BUILT_IN_FREXP):
10098 return fold_builtin_frexp (loc, arg0, arg1, type);
10100 CASE_FLT_FN (BUILT_IN_MODF):
10101 return fold_builtin_modf (loc, arg0, arg1, type);
/* Memory / string builtins.  */
10103 case BUILT_IN_BZERO:
10104 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10106 case BUILT_IN_FPUTS:
10107 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10109 case BUILT_IN_FPUTS_UNLOCKED:
10110 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10112 case BUILT_IN_STRSTR:
10113 return fold_builtin_strstr (loc, arg0, arg1, type);
10115 case BUILT_IN_STRCAT:
10116 return fold_builtin_strcat (loc, arg0, arg1);
10118 case BUILT_IN_STRSPN:
10119 return fold_builtin_strspn (loc, arg0, arg1);
10121 case BUILT_IN_STRCSPN:
10122 return fold_builtin_strcspn (loc, arg0, arg1);
10124 case BUILT_IN_STRCHR:
10125 case BUILT_IN_INDEX:
10126 return fold_builtin_strchr (loc, arg0, arg1, type);
10128 case BUILT_IN_STRRCHR:
10129 case BUILT_IN_RINDEX:
10130 return fold_builtin_strrchr (loc, arg0, arg1, type);
10132 case BUILT_IN_STRCPY:
10133 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10135 case BUILT_IN_STPCPY:
/* When the result is ignored, stpcpy is equivalent to strcpy —
   presumably guarded by an elided `if (ignore)` test; TODO confirm.  */
10138 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10142 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10145 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10148 case BUILT_IN_STRCMP:
10149 return fold_builtin_strcmp (loc, arg0, arg1);
10151 case BUILT_IN_STRPBRK:
10152 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10154 case BUILT_IN_EXPECT:
10155 return fold_builtin_expect (loc, arg0, arg1);
10157 CASE_FLT_FN (BUILT_IN_POW):
10158 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10160 CASE_FLT_FN (BUILT_IN_POWI):
10161 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10163 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10164 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10166 CASE_FLT_FN (BUILT_IN_FMIN):
10167 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10169 CASE_FLT_FN (BUILT_IN_FMAX):
10170 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
/* Unordered comparisons pass the INVERSE comparison codes; see
   fold_builtin_unordered_cmp.  */
10172 case BUILT_IN_ISGREATER:
10173 return fold_builtin_unordered_cmp (loc, fndecl,
10174 arg0, arg1, UNLE_EXPR, LE_EXPR);
10175 case BUILT_IN_ISGREATEREQUAL:
10176 return fold_builtin_unordered_cmp (loc, fndecl,
10177 arg0, arg1, UNLT_EXPR, LT_EXPR);
10178 case BUILT_IN_ISLESS:
10179 return fold_builtin_unordered_cmp (loc, fndecl,
10180 arg0, arg1, UNGE_EXPR, GE_EXPR);
10181 case BUILT_IN_ISLESSEQUAL:
10182 return fold_builtin_unordered_cmp (loc, fndecl,
10183 arg0, arg1, UNGT_EXPR, GT_EXPR);
10184 case BUILT_IN_ISLESSGREATER:
10185 return fold_builtin_unordered_cmp (loc, fndecl,
10186 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10187 case BUILT_IN_ISUNORDERED:
10188 return fold_builtin_unordered_cmp (loc, fndecl,
10189 arg0, arg1, UNORDERED_EXPR,
10192 /* We do the folding for va_start in the expander. */
10193 case BUILT_IN_VA_START:
10196 case BUILT_IN_SPRINTF:
10197 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10199 case BUILT_IN_OBJECT_SIZE:
10200 return fold_builtin_object_size (arg0, arg1);
10202 case BUILT_IN_PRINTF:
10203 case BUILT_IN_PRINTF_UNLOCKED:
10204 case BUILT_IN_VPRINTF:
10205 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
/* The _CHK variants carry a leading flag argument; it must be a
   side-effect-free integer or the fold is abandoned.  */
10207 case BUILT_IN_PRINTF_CHK:
10208 case BUILT_IN_VPRINTF_CHK:
10209 if (!validate_arg (arg0, INTEGER_TYPE)
10210 || TREE_SIDE_EFFECTS (arg0))
10213 return fold_builtin_printf (loc, fndecl,
10214 arg1, NULL_TREE, ignore, fcode);
10217 case BUILT_IN_FPRINTF:
10218 case BUILT_IN_FPRINTF_UNLOCKED:
10219 case BUILT_IN_VFPRINTF:
10220 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10229 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10230 and ARG2. IGNORE is true if the result of the function call is ignored.
10231 This function returns NULL_TREE if no simplification was possible. */
/* Three-argument dispatch.  NOTE(review): the switch header, break
   statements, default case and final return are elided in this
   excerpt.  */
10234 fold_builtin_3 (location_t loc, tree fndecl,
10235 tree arg0, tree arg1, tree arg2, bool ignore)
10237 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10238 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10242 CASE_FLT_FN (BUILT_IN_SINCOS):
10243 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10245 CASE_FLT_FN (BUILT_IN_FMA):
10246 if (validate_arg (arg0, REAL_TYPE)
10247 && validate_arg(arg1, REAL_TYPE)
10248 && validate_arg(arg2, REAL_TYPE))
10249 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10252 CASE_FLT_FN (BUILT_IN_REMQUO):
10253 if (validate_arg (arg0, REAL_TYPE)
10254 && validate_arg(arg1, REAL_TYPE)
10255 && validate_arg(arg2, POINTER_TYPE))
10256 return do_mpfr_remquo (arg0, arg1, arg2);
10259 case BUILT_IN_MEMSET:
10260 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
/* bcopy(src, dst, n) swaps the operand order relative to memmove and
   may overlap, hence endp=3.  */
10262 case BUILT_IN_BCOPY:
10263 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10264 void_type_node, true, /*endp=*/3);
10266 case BUILT_IN_MEMCPY:
10267 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10268 type, ignore, /*endp=*/0);
10270 case BUILT_IN_MEMPCPY:
10271 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10272 type, ignore, /*endp=*/1);
10274 case BUILT_IN_MEMMOVE:
10275 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10276 type, ignore, /*endp=*/3);
10278 case BUILT_IN_STRNCAT:
10279 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10281 case BUILT_IN_STRNCPY:
10282 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10284 case BUILT_IN_STRNCMP:
10285 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10287 case BUILT_IN_MEMCHR:
10288 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10290 case BUILT_IN_BCMP:
10291 case BUILT_IN_MEMCMP:
10292 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10294 case BUILT_IN_SPRINTF:
10295 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10297 case BUILT_IN_STRCPY_CHK:
10298 case BUILT_IN_STPCPY_CHK:
10299 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10302 case BUILT_IN_STRCAT_CHK:
10303 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
/* _CHK printf variants: the flag argument must be a side-effect-free
   integer or no fold is attempted.  */
10305 case BUILT_IN_PRINTF_CHK:
10306 case BUILT_IN_VPRINTF_CHK:
10307 if (!validate_arg (arg0, INTEGER_TYPE)
10308 || TREE_SIDE_EFFECTS (arg0))
10311 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10314 case BUILT_IN_FPRINTF:
10315 case BUILT_IN_FPRINTF_UNLOCKED:
10316 case BUILT_IN_VFPRINTF:
10317 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10320 case BUILT_IN_FPRINTF_CHK:
10321 case BUILT_IN_VFPRINTF_CHK:
10322 if (!validate_arg (arg1, INTEGER_TYPE)
10323 || TREE_SIDE_EFFECTS (arg1))
10326 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10335 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10336 ARG2, and ARG3. IGNORE is true if the result of the function call is
10337 ignored. This function returns NULL_TREE if no simplification was
10341 fold_builtin_4 (location_t loc, tree fndecl,
10342 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10344 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10348 case BUILT_IN_MEMCPY_CHK:
10349 case BUILT_IN_MEMPCPY_CHK:
10350 case BUILT_IN_MEMMOVE_CHK:
10351 case BUILT_IN_MEMSET_CHK:
10352 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10354 DECL_FUNCTION_CODE (fndecl));
10356 case BUILT_IN_STRNCPY_CHK:
10357 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10359 case BUILT_IN_STRNCAT_CHK:
10360 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
/* ARG1 is the _FORTIFY_SOURCE checking flag; require a side-effect-free
   integer before folding it away.  */
10362 case BUILT_IN_FPRINTF_CHK:
10363 case BUILT_IN_VFPRINTF_CHK:
10364 if (!validate_arg (arg1, INTEGER_TYPE)
10365 || TREE_SIDE_EFFECTS (arg1))
10368 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10378 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10379 arguments, where NARGS <= 4. IGNORE is true if the result of the
10380 function call is ignored. This function returns NULL_TREE if no
10381 simplification was possible. Note that this only folds builtins with
10382 fixed argument patterns. Foldings that do varargs-to-varargs
10383 transformations, or that match calls with more than 4 arguments,
10384 need to be handled with fold_builtin_varargs instead. */
10386 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10389 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10391 tree ret = NULL_TREE;
/* Dispatch to the fixed-arity folder matching NARGS.  */
10396 ret = fold_builtin_0 (loc, fndecl, ignore);
10399 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10402 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10405 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10408 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
/* Wrap a successful fold in a NOP_EXPR with TREE_NO_WARNING set, so that
   replacing the call does not provoke spurious "statement without effect"
   style warnings.  */
10416 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10417 SET_EXPR_LOCATION (ret, loc);
10418 TREE_NO_WARNING (ret) = 1;
10424 /* Builtins with folding operations that operate on "..." arguments
10425 need special handling; we need to store the arguments in a convenient
10426 data structure before attempting any folding. Fortunately there are
10427 only a few builtins that fall into this category. FNDECL is the
10428 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10429 result of the function call is ignored. */
10432 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10433 bool ignore ATTRIBUTE_UNUSED)
10435 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10436 tree ret = NULL_TREE;
10440 case BUILT_IN_SPRINTF_CHK:
10441 case BUILT_IN_VSPRINTF_CHK:
10442 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10445 case BUILT_IN_SNPRINTF_CHK:
10446 case BUILT_IN_VSNPRINTF_CHK:
10447 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10450 case BUILT_IN_FPCLASSIFY:
10451 ret = fold_builtin_fpclassify (loc, exp);
/* As in fold_builtin_n, wrap the folded result to suppress warnings
   caused by removing the original call.  */
10459 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10460 SET_EXPR_LOCATION (ret, loc);
10461 TREE_NO_WARNING (ret) = 1;
10467 /* Return true if FNDECL shouldn't be folded right now.
10468 If a built-in function has an inline attribute always_inline
10469 wrapper, defer folding it after always_inline functions have
10470 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10471 might not be performed. */
10474 avoid_folding_inline_builtin (tree fndecl)
/* Folding is only deferred until the always_inline inlining pass has run,
   i.e. until cfun->always_inline_functions_inlined becomes set.  */
10476 return (DECL_DECLARED_INLINE_P (fndecl)
10477 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10479 && !cfun->always_inline_functions_inlined
10480 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10483 /* A wrapper function for builtin folding that prevents warnings for
10484 "statement without effect" and the like, caused by removing the
10485 call node earlier than the warning is generated. */
10488 fold_call_expr (location_t loc, tree exp, bool ignore)
10490 tree ret = NULL_TREE;
10491 tree fndecl = get_callee_fndecl (exp);
10493 && TREE_CODE (fndecl) == FUNCTION_DECL
10494 && DECL_BUILT_IN (fndecl)
10495 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10496 yet. Defer folding until we see all the arguments
10497 (after inlining). */
10498 && !CALL_EXPR_VA_ARG_PACK (exp))
10500 int nargs = call_expr_nargs (exp);
10502 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10503 instead last argument is __builtin_va_arg_pack (). Defer folding
10504 even in that case, until arguments are finalized. */
10505 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10507 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10509 && TREE_CODE (fndecl2) == FUNCTION_DECL
10510 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10511 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10515 if (avoid_folding_inline_builtin (fndecl))
10518 /* FIXME: Don't use a list in this interface. */
/* Machine-specific builtins are folded by the target hook.  */
10519 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10520 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
/* Fixed-arity folds handle up to MAX_ARGS_TO_FOLD_BUILTIN arguments;
   everything else goes through the varargs path.  */
10523 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10525 tree *args = CALL_EXPR_ARGP (exp);
10526 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10529 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10537 /* Conveniently construct a function call expression. FNDECL names the
10538 function to be called and ARGLIST is a TREE_LIST of arguments. */
10541 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10543 tree fntype = TREE_TYPE (fndecl);
10544 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10545 int n = list_length (arglist);
10546 tree *argarray = (tree *) alloca (n * sizeof (tree));
/* Flatten the TREE_LIST into a stack-allocated argument array, then let
   fold_builtin_call_array build (and possibly fold) the call.  */
10549 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10550 argarray[i] = TREE_VALUE (arglist);
10551 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10554 /* Conveniently construct a function call expression. FNDECL names the
10555 function to be called, N is the number of arguments, and the "..."
10556 parameters are the argument expressions. */
10559 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10562 tree fntype = TREE_TYPE (fndecl);
10563 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10564 tree *argarray = (tree *) alloca (n * sizeof (tree));
/* Collect the N variadic arguments into ARGARRAY.  */
10568 for (i = 0; i < n; i++)
10569 argarray[i] = va_arg (ap, tree);
10571 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10574 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10575 N arguments are passed in the array ARGARRAY. */
10578 fold_builtin_call_array (location_t loc, tree type,
10583 tree ret = NULL_TREE;
/* Folding only applies when the callee is a known builtin FUNCTION_DECL;
   otherwise just build the call.  */
10587 if (TREE_CODE (fn) == ADDR_EXPR)
10589 tree fndecl = TREE_OPERAND (fn, 0);
10590 if (TREE_CODE (fndecl) == FUNCTION_DECL
10591 && DECL_BUILT_IN (fndecl))
10593 /* If last argument is __builtin_va_arg_pack (), arguments to this
10594 function are not finalized yet. Defer folding until they are. */
10595 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10597 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10599 && TREE_CODE (fndecl2) == FUNCTION_DECL
10600 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10601 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10602 return build_call_array_loc (loc, type, fn, n, argarray);
10604 if (avoid_folding_inline_builtin (fndecl))
10605 return build_call_array_loc (loc, type, fn, n, argarray);
/* The target fold_builtin hook still takes a TREE_LIST; build one
   from ARGARRAY (in reverse, since tree_cons prepends).  */
10606 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10608 tree arglist = NULL_TREE;
10609 for (i = n - 1; i >= 0; i--)
10610 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10611 ret = targetm.fold_builtin (fndecl, arglist, false);
10614 return build_call_array_loc (loc, type, fn, n, argarray);
/* Try the fixed-arity folders first; they work directly on ARGARRAY.  */
10616 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10618 /* First try the transformations that don't require consing up
10620 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10625 /* If we got this far, we need to build an exp. */
10626 exp = build_call_array_loc (loc, type, fn, n, argarray);
10627 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10628 return ret ? ret : exp;
10632 return build_call_array_loc (loc, type, fn, n, argarray);
10635 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10636 along with N new arguments specified as the "..." parameters. SKIP
10637 is the number of arguments in EXP to be omitted. This function is used
10638 to do varargs-to-varargs transformations. */
10641 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10643 int oldnargs = call_expr_nargs (exp);
10644 int nargs = oldnargs - skip + n;
10645 tree fntype = TREE_TYPE (fndecl);
10646 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
/* Build the new argument vector: the N "..." arguments first, then the
   surviving tail of EXP's arguments.  */
10654 buffer = XALLOCAVEC (tree, nargs);
10656 for (i = 0; i < n; i++)
10657 buffer[i] = va_arg (ap, tree);
10659 for (j = skip; j < oldnargs; j++, i++)
10660 buffer[i] = CALL_EXPR_ARG (exp, j);
/* NOTE(review): this branch appears to reuse EXP's own argument vector
   (offset past the skipped entries) when no new arguments need to be
   prepended — confirm against the elided condition above.  */
10663 buffer = CALL_EXPR_ARGP (exp) + skip;
10665 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
10668 /* Validate a single argument ARG against a tree code CODE representing
10672 validate_arg (const_tree arg, enum tree_code code)
/* POINTER_TYPE and INTEGER_TYPE are deliberately loose: any pointer type
   and any integral type (including enums and booleans) are accepted.
   All other codes require an exact TREE_CODE match on ARG's type.  */
10676 else if (code == POINTER_TYPE)
10677 return POINTER_TYPE_P (TREE_TYPE (arg));
10678 else if (code == INTEGER_TYPE)
10679 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10680 return code == TREE_CODE (TREE_TYPE (arg));
10683 /* This function validates the types of a function call argument list
10684 against a specified list of tree_codes. If the last specifier is a 0,
10685 that represents an ellipsis, otherwise the last specifier must be a
10688 This is the GIMPLE version of validate_arglist. Eventually we want to
10689 completely convert builtins.c to work from GIMPLEs and the tree based
10690 validate_arglist will then be removed. */
10693 validate_gimple_arglist (const_gimple call, ...)
10695 enum tree_code code;
10701 va_start (ap, call);
/* Each va_arg specifier is one expected tree code; 0 and VOID_TYPE act
   as the two possible terminators (see the cases below).  */
10706 code = (enum tree_code) va_arg (ap, int);
10710 /* This signifies an ellipsis, any further arguments are all ok. */
10714 /* This signifies an endlink, if no arguments remain, return
10715 true, otherwise return false. */
10716 res = (i == gimple_call_num_args (call));
10719 /* If no parameters remain or the parameter's code does not
10720 match the specified code, return false. Otherwise continue
10721 checking any remaining arguments. */
10722 arg = gimple_call_arg (call, i++);
10723 if (!validate_arg (arg, code))
10730 /* We need gotos here since we can only have one VA_CLOSE in a
10738 /* This function validates the types of a function call argument list
10739 against a specified list of tree_codes. If the last specifier is a 0,
10740 that represents an ellipsis, otherwise the last specifier must be a
10744 validate_arglist (const_tree callexpr, ...)
10746 enum tree_code code;
10749 const_call_expr_arg_iterator iter;
10752 va_start (ap, callexpr);
10753 init_const_call_expr_arg_iterator (callexpr, &iter);
/* Tree (CALL_EXPR) counterpart of validate_gimple_arglist; walks the
   arguments with the call-expr iterator instead of an index.  */
10757 code = (enum tree_code) va_arg (ap, int);
10761 /* This signifies an ellipsis, any further arguments are all ok. */
10765 /* This signifies an endlink, if no arguments remain, return
10766 true, otherwise return false. */
10767 res = !more_const_call_expr_args_p (&iter);
10770 /* If no parameters remain or the parameter's code does not
10771 match the specified code, return false. Otherwise continue
10772 checking any remaining arguments. */
10773 arg = next_const_call_expr_arg (&iter);
10774 if (!validate_arg (arg, code))
10781 /* We need gotos here since we can only have one VA_CLOSE in a
10789 /* Default target-specific builtin expander that does nothing. */
10792 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10793 rtx target ATTRIBUTE_UNUSED,
10794 rtx subtarget ATTRIBUTE_UNUSED,
10795 enum machine_mode mode ATTRIBUTE_UNUSED,
10796 int ignore ATTRIBUTE_UNUSED)
/* NOTE(review): body not visible here; presumably returns NULL_RTX so the
   caller falls back to generic expansion — confirm.  */
10801 /* Returns true if EXP represents data that would potentially reside
10802 in a readonly section. */
10805 readonly_data_expr (tree exp)
10809 if (TREE_CODE (exp) != ADDR_EXPR)
10812 exp = get_base_address (TREE_OPERAND (exp, 0));
10816 /* Make sure we call decl_readonly_section only for trees it
10817 can handle (since it returns true for everything it doesn't
/* Only string constants, constructors and static variables are safe to
   hand to decl_readonly_section; anything else is conservatively not
   considered read-only.  */
10819 if (TREE_CODE (exp) == STRING_CST
10820 || TREE_CODE (exp) == CONSTRUCTOR
10821 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10822 return decl_readonly_section (exp, 0);
10827 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10828 to the call, and TYPE is its return type.
10830 Return NULL_TREE if no simplification was possible, otherwise return the
10831 simplified form of the call as a tree.
10833 The simplified form may be a constant or other expression which
10834 computes the same value, but in a more efficient manner (including
10835 calls to other builtin functions).
10837 The call may contain arguments which need to be evaluated, but
10838 which are not useful to determine the result of the call. In
10839 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10840 COMPOUND_EXPR will be an argument which must be evaluated.
10841 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10842 COMPOUND_EXPR in the chain will contain the tree for the simplified
10843 form of the builtin function call. */
10846 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10848 if (!validate_arg (s1, POINTER_TYPE)
10849 || !validate_arg (s2, POINTER_TYPE))
10854 const char *p1, *p2;
10856 p2 = c_getstr (s2);
/* Both strings constant: evaluate strstr at compile time.  */
10860 p1 = c_getstr (s1);
10863 const char *r = strstr (p1, p2);
/* Not found: fold to a null pointer.  */
10867 return build_int_cst (TREE_TYPE (s1), 0);
10869 /* Return an offset into the constant string argument. */
10870 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10871 s1, size_int (r - p1));
10872 return fold_convert_loc (loc, type, tem);
10875 /* The argument is const char *, and the result is char *, so we need
10876 a type conversion here to avoid a warning. */
10878 return fold_convert_loc (loc, type, s1);
10883 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10887 /* New argument list transforming strstr(s1, s2) to
10888 strchr(s1, s2[0]). */
10889 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10893 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10894 the call, and TYPE is its return type.
10896 Return NULL_TREE if no simplification was possible, otherwise return the
10897 simplified form of the call as a tree.
10899 The simplified form may be a constant or other expression which
10900 computes the same value, but in a more efficient manner (including
10901 calls to other builtin functions).
10903 The call may contain arguments which need to be evaluated, but
10904 which are not useful to determine the result of the call. In
10905 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10906 COMPOUND_EXPR will be an argument which must be evaluated.
10907 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10908 COMPOUND_EXPR in the chain will contain the tree for the simplified
10909 form of the builtin function call. */
10912 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10914 if (!validate_arg (s1, POINTER_TYPE)
10915 || !validate_arg (s2, INTEGER_TYPE))
/* Only constant strings and a constant character can be folded here.  */
10921 if (TREE_CODE (s2) != INTEGER_CST)
10924 p1 = c_getstr (s1);
/* target_char_cast converts S2 to the target character set; a nonzero
   return means the cast failed and we cannot fold.  */
10931 if (target_char_cast (s2, &c))
10934 r = strchr (p1, c);
10937 return build_int_cst (TREE_TYPE (s1), 0);
10939 /* Return an offset into the constant string argument. */
10940 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10941 s1, size_int (r - p1));
10942 return fold_convert_loc (loc, type, tem);
10948 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10949 the call, and TYPE is its return type.
10951 Return NULL_TREE if no simplification was possible, otherwise return the
10952 simplified form of the call as a tree.
10954 The simplified form may be a constant or other expression which
10955 computes the same value, but in a more efficient manner (including
10956 calls to other builtin functions).
10958 The call may contain arguments which need to be evaluated, but
10959 which are not useful to determine the result of the call. In
10960 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10961 COMPOUND_EXPR will be an argument which must be evaluated.
10962 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10963 COMPOUND_EXPR in the chain will contain the tree for the simplified
10964 form of the builtin function call. */
10967 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10969 if (!validate_arg (s1, POINTER_TYPE)
10970 || !validate_arg (s2, INTEGER_TYPE))
10977 if (TREE_CODE (s2) != INTEGER_CST)
10980 p1 = c_getstr (s1);
/* Nonzero from target_char_cast means S2 could not be converted to a
   target character, so no folding is possible.  */
10987 if (target_char_cast (s2, &c))
10990 r = strrchr (p1, c);
10993 return build_int_cst (TREE_TYPE (s1), 0);
10995 /* Return an offset into the constant string argument. */
10996 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10997 s1, size_int (r - p1));
10998 return fold_convert_loc (loc, type, tem);
/* A non-constant S1: strrchr(s1, 0) finds the terminating NUL, which is
   exactly what strchr(s1, 0) finds, so the cheaper call can be used.  */
11001 if (! integer_zerop (s2))
11004 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11008 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11009 return build_call_expr_loc (loc, fn, 2, s1, s2);
11013 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11014 to the call, and TYPE is its return type.
11016 Return NULL_TREE if no simplification was possible, otherwise return the
11017 simplified form of the call as a tree.
11019 The simplified form may be a constant or other expression which
11020 computes the same value, but in a more efficient manner (including
11021 calls to other builtin functions).
11023 The call may contain arguments which need to be evaluated, but
11024 which are not useful to determine the result of the call. In
11025 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11026 COMPOUND_EXPR will be an argument which must be evaluated.
11027 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11028 COMPOUND_EXPR in the chain will contain the tree for the simplified
11029 form of the builtin function call. */
11032 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11034 if (!validate_arg (s1, POINTER_TYPE)
11035 || !validate_arg (s2, POINTER_TYPE))
11040 const char *p1, *p2;
11042 p2 = c_getstr (s2);
/* Both arguments constant: evaluate strpbrk at compile time.  */
11046 p1 = c_getstr (s1);
11049 const char *r = strpbrk (p1, p2);
11053 return build_int_cst (TREE_TYPE (s1), 0);
11055 /* Return an offset into the constant string argument. */
11056 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11057 s1, size_int (r - p1));
11058 return fold_convert_loc (loc, type, tem);
11062 /* strpbrk(x, "") == NULL.
11063 Evaluate and ignore s1 in case it had side-effects. */
11064 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11067 return NULL_TREE; /* Really call strpbrk. */
11069 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11073 /* New argument list transforming strpbrk(s1, s2) to
11074 strchr(s1, s2[0]). */
11075 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11079 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11082 Return NULL_TREE if no simplification was possible, otherwise return the
11083 simplified form of the call as a tree.
11085 The simplified form may be a constant or other expression which
11086 computes the same value, but in a more efficient manner (including
11087 calls to other builtin functions).
11089 The call may contain arguments which need to be evaluated, but
11090 which are not useful to determine the result of the call. In
11091 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11092 COMPOUND_EXPR will be an argument which must be evaluated.
11093 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11094 COMPOUND_EXPR in the chain will contain the tree for the simplified
11095 form of the builtin function call. */
11098 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11100 if (!validate_arg (dst, POINTER_TYPE)
11101 || !validate_arg (src, POINTER_TYPE))
11105 const char *p = c_getstr (src);
11107 /* If the string length is zero, return the dst parameter. */
11108 if (p && *p == '\0')
/* The strlen+strcpy expansion below trades size for speed, so only do
   it when optimizing this insn for speed.  */
11111 if (optimize_insn_for_speed_p ())
11113 /* See if we can store by pieces into (dst + strlen(dst)). */
11115 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11116 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11118 if (!strlen_fn || !strcpy_fn)
11121 /* If we don't have a movstr we don't want to emit an strcpy
11122 call. We have to do that if the length of the source string
11123 isn't computable (in that case we can use memcpy probably
11124 later expanding to a sequence of mov instructions). If we
11125 have movstr instructions we can emit strcpy calls. */
11128 tree len = c_strlen (src, 1);
11129 if (! len || TREE_SIDE_EFFECTS (len))
11133 /* Stabilize the argument list. */
11134 dst = builtin_save_expr (dst);
11136 /* Create strlen (dst). */
11137 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11138 /* Create (dst p+ strlen (dst)). */
11140 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11141 TREE_TYPE (dst), dst, newdst);
11142 newdst = builtin_save_expr (newdst);
/* Emit strcpy (dst + strlen (dst), src) and yield DST as the value of
   the whole expression, matching strcat's return value.  */
11144 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11145 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11151 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11152 arguments to the call.
11154 Return NULL_TREE if no simplification was possible, otherwise return the
11155 simplified form of the call as a tree.
11157 The simplified form may be a constant or other expression which
11158 computes the same value, but in a more efficient manner (including
11159 calls to other builtin functions).
11161 The call may contain arguments which need to be evaluated, but
11162 which are not useful to determine the result of the call. In
11163 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11164 COMPOUND_EXPR will be an argument which must be evaluated.
11165 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11166 COMPOUND_EXPR in the chain will contain the tree for the simplified
11167 form of the builtin function call. */
11170 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11172 if (!validate_arg (dst, POINTER_TYPE)
11173 || !validate_arg (src, POINTER_TYPE)
11174 || !validate_arg (len, INTEGER_TYPE))
11178 const char *p = c_getstr (src);
11180 /* If the requested length is zero, or the src parameter string
11181 length is zero, return the dst parameter. */
11182 if (integer_zerop (len) || (p && *p == '\0'))
11183 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11185 /* If the requested len is greater than or equal to the string
11186 length, call strcat. */
/* Safe because LEN >= strlen(p): strncat and strcat then copy exactly
   the same bytes, including the terminating NUL.  */
11187 if (TREE_CODE (len) == INTEGER_CST && p
11188 && compare_tree_int (len, strlen (p)) >= 0)
11190 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11192 /* If the replacement _DECL isn't initialized, don't do the
11197 return build_call_expr_loc (loc, fn, 2, dst, src);
11203 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11206 Return NULL_TREE if no simplification was possible, otherwise return the
11207 simplified form of the call as a tree.
11209 The simplified form may be a constant or other expression which
11210 computes the same value, but in a more efficient manner (including
11211 calls to other builtin functions).
11213 The call may contain arguments which need to be evaluated, but
11214 which are not useful to determine the result of the call. In
11215 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11216 COMPOUND_EXPR will be an argument which must be evaluated.
11217 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11218 COMPOUND_EXPR in the chain will contain the tree for the simplified
11219 form of the builtin function call. */
11222 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11224 if (!validate_arg (s1, POINTER_TYPE)
11225 || !validate_arg (s2, POINTER_TYPE))
11229 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11231 /* If both arguments are constants, evaluate at compile-time. */
11234 const size_t r = strspn (p1, p2);
11235 return size_int (r);
11238 /* If either argument is "", return NULL_TREE. */
/* strspn with an empty string on either side is always 0.  */
11239 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11240 /* Evaluate and ignore both arguments in case either one has
11242 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11248 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11251 Return NULL_TREE if no simplification was possible, otherwise return the
11252 simplified form of the call as a tree.
11254 The simplified form may be a constant or other expression which
11255 computes the same value, but in a more efficient manner (including
11256 calls to other builtin functions).
11258 The call may contain arguments which need to be evaluated, but
11259 which are not useful to determine the result of the call. In
11260 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11261 COMPOUND_EXPR will be an argument which must be evaluated.
11262 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11263 COMPOUND_EXPR in the chain will contain the tree for the simplified
11264 form of the builtin function call. */
11267 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11269 if (!validate_arg (s1, POINTER_TYPE)
11270 || !validate_arg (s2, POINTER_TYPE))
11274 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11276 /* If both arguments are constants, evaluate at compile-time. */
11279 const size_t r = strcspn (p1, p2);
11280 return size_int (r);
11283 /* If the first argument is "", return NULL_TREE. */
11284 if (p1 && *p1 == '\0')
11286 /* Evaluate and ignore argument s2 in case it has
11288 return omit_one_operand_loc (loc, size_type_node,
11289 size_zero_node, s2);
11292 /* If the second argument is "", return __builtin_strlen(s1). */
/* strcspn (s1, "") scans all of S1, so its result equals strlen (s1).  */
11293 if (p2 && *p2 == '\0')
11295 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11297 /* If the replacement _DECL isn't initialized, don't do the
11302 return build_call_expr_loc (loc, fn, 1, s1);
11308 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11309 to the call. IGNORE is true if the value returned
11310 by the builtin will be ignored. UNLOCKED is true if this is
11311 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11312 the known length of the string. Return NULL_TREE if no simplification
11316 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11317 bool ignore, bool unlocked, tree len)
11319 /* If we're using an unlocked function, assume the other unlocked
11320 functions exist explicitly. */
11321 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11322 : implicit_built_in_decls[BUILT_IN_FPUTC]
11323 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11324 : implicit_built_in_decls[BUILT_IN_FWRITE];
11326 /* If the return value is used, don't do the transformation. */
11330 /* Verify the arguments in the original call. */
11331 if (!validate_arg (arg0, POINTER_TYPE)
11332 || !validate_arg (arg1, POINTER_TYPE))
11336 len = c_strlen (arg0, 0);
11338 /* Get the length of the string passed to fputs. If the length
11339 can't be determined, punt. */
11341 || TREE_CODE (len) != INTEGER_CST)
/* compare_tree_int (len, 1) classifies the constant length:
   -1 means LEN == 0, 0 means LEN == 1, 1 means LEN > 1.  */
11344 switch (compare_tree_int (len, 1))
11346 case -1: /* length is 0, delete the call entirely . */
11347 return omit_one_operand_loc (loc, integer_type_node,
11348 integer_zero_node, arg1);;
11350 case 0: /* length is 1, call fputc. */
11352 const char *p = c_getstr (arg0);
11357 return build_call_expr_loc (loc, fn_fputc, 2,
11358 build_int_cst (NULL_TREE, p[0]), arg1);
11364 case 1: /* length is greater than 1, call fwrite. */
11366 /* If optimizing for size keep fputs. */
11367 if (optimize_function_for_size_p (cfun))
11369 /* New argument list transforming fputs(string, stream) to
11370 fwrite(string, 1, len, stream). */
11372 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11373 size_one_node, len, arg1);
11378 gcc_unreachable ();
11383 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11384 produced. False otherwise. This is done so that we don't output the error
11385 or warning twice or three times. */
11388 fold_builtin_next_arg (tree exp, bool va_start_p)
11390 tree fntype = TREE_TYPE (current_function_decl);
11391 int nargs = call_expr_nargs (exp);
/* va_start is only meaningful in a function with a "..." parameter:
   a fixed-args prototype ends in void_type_node.  */
11394 if (TYPE_ARG_TYPES (fntype) == 0
11395 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11396 == void_type_node))
11398 error ("%<va_start%> used in function with fixed args");
11404 if (va_start_p && (nargs != 2))
11406 error ("wrong number of arguments to function %<va_start%>");
11409 arg = CALL_EXPR_ARG (exp, 1);
11411 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11412 when we checked the arguments and if needed issued a warning. */
11417 /* Evidently an out of date version of <stdarg.h>; can't validate
11418 va_start's second argument, but can still work as intended. */
11419 warning (0, "%<__builtin_next_arg%> called without an argument");
11422 else if (nargs > 1)
11424 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11427 arg = CALL_EXPR_ARG (exp, 0);
/* Strip an SSA_NAME wrapper so the comparison below sees the
   underlying variable.  */
11430 if (TREE_CODE (arg) == SSA_NAME)
11431 arg = SSA_NAME_VAR (arg);
11433 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11434 or __builtin_next_arg (0) the first time we see it, after checking
11435 the arguments and if needed issuing a warning. */
11436 if (!integer_zerop (arg))
11438 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11440 /* Strip off all nops for the sake of the comparison. This
11441 is not quite the same as STRIP_NOPS. It does more.
11442 We must also strip off INDIRECT_EXPR for C++ reference
11444 while (CONVERT_EXPR_P (arg)
11445 || TREE_CODE (arg) == INDIRECT_REF)
11446 arg = TREE_OPERAND (arg, 0);
11447 if (arg != last_parm)
11449 /* FIXME: Sometimes with the tree optimizers we can get the
11450 not the last argument even though the user used the last
11451 argument. We just warn and set the arg to be the last
11452 argument so that we will get wrong-code because of
11454 warning (0, "second parameter of %<va_start%> not last named argument");
11457 /* Undefined by C99 7.15.1.4p4 (va_start):
11458 "If the parameter parmN is declared with the register storage
11459 class, with a function or array type, or with a type that is
11460 not compatible with the type that results after application of
11461 the default argument promotions, the behavior is undefined."
11463 else if (DECL_REGISTER (arg))
11464 warning (0, "undefined behaviour when second parameter of "
11465 "%<va_start%> is declared with %<register%> storage");
11467 /* We want to verify the second parameter just once before the tree
11468 optimizers are run and then avoid keeping it in the tree,
11469 as otherwise we could warn even for correct code like:
11470 void foo (int i, ...)
11471 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11473 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11475 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11481 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11482 ORIG may be null if this is a 2-argument call. We don't attempt to
11483 simplify calls with more than 3 arguments.
11485 Return NULL_TREE if no simplification was possible, otherwise return the
11486 simplified form of the call as a tree. If IGNORED is true, it means that
11487 the caller does not use the returned value of the function. */
11490 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11491 tree orig, int ignored)
11494 const char *fmt_str = NULL;
11496 /* Verify the required arguments in the original call. We deal with two
11497 types of sprintf() calls: 'sprintf (str, fmt)' and
11498 'sprintf (dest, "%s", orig)'. */
11499 if (!validate_arg (dest, POINTER_TYPE)
11500 || !validate_arg (fmt, POINTER_TYPE))
11502 if (orig && !validate_arg (orig, POINTER_TYPE))
11505 /* Check whether the format is a literal string constant. */
11506 fmt_str = c_getstr (fmt);
/* A non-literal format string cannot be analyzed; no simplification.  */
11507 if (fmt_str == NULL)
11511 retval = NULL_TREE;
11513 if (!init_target_chars ())
11516 /* If the format doesn't contain % args or %%, use strcpy. */
11517 if (strchr (fmt_str, target_percent) == NULL)
11519 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11524 /* Don't optimize sprintf (buf, "abc", ptr++). */
11528 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11529 'format' is known to contain no % formats. */
11530 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
/* sprintf returns the number of characters written; for a %-free
   literal format that is simply strlen (fmt_str).  */
11532 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11535 /* If the format is "%s", use strcpy if the result isn't used. */
11536 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11539 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11544 /* Don't crash on sprintf (str1, "%s"). */
11548 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
/* Return value is only representable when ORIG's length is a
   compile-time integer constant.  */
11551 retval = c_strlen (orig, 1);
11552 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11555 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11558 if (call && retval)
/* Convert the computed length to sprintf's declared return type
   before pairing it with the strcpy call in a COMPOUND_EXPR.  */
11560 retval = fold_convert_loc
11561 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11563 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11569 /* Expand a call EXP to __builtin_object_size. */
11572 expand_builtin_object_size (tree exp)
11575 int object_size_type;
11576 tree fndecl = get_callee_fndecl (exp);
/* The builtin takes exactly (void *, int); anything else is a hard
   error and the call is replaced with a trap.  */
11578 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11580 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11582 expand_builtin_trap ();
11586 ost = CALL_EXPR_ARG (exp, 1)
11589 if (TREE_CODE (ost) != INTEGER_CST
11590 || tree_int_cst_sgn (ost) < 0
11591 || compare_tree_int (ost, 3) > 0)
11593 error ("%Klast argument of %D is not integer constant between 0 and 3",
11595 expand_builtin_trap ();
11599 object_size_type = tree_low_cst (ost, 0);
/* Unknown size: types 0 and 1 default to (size_t)-1, types 2 and 3
   default to 0, per the __builtin_object_size contract.  */
11601 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11604 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11605 FCODE is the BUILT_IN_* to use.
11606 Return NULL_RTX if we failed; the caller should emit a normal call,
11607 otherwise try to get the result in TARGET, if convenient (and in
11608 mode MODE if that's convenient). */
11611 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11612 enum built_in_function fcode)
11614 tree dest, src, len, size;
/* For memset_chk the second argument is the fill value (an integer),
   for the copy/move variants it is a source pointer.  */
11616 if (!validate_arglist (exp,
11618 fcode == BUILT_IN_MEMSET_CHK
11619 ? INTEGER_TYPE : POINTER_TYPE,
11620 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11623 dest = CALL_EXPR_ARG (exp, 0);
11624 src = CALL_EXPR_ARG (exp, 1);
11625 len = CALL_EXPR_ARG (exp, 2);
11626 size = CALL_EXPR_ARG (exp, 3);
/* SIZE must be a known constant to reason about overflow at all.  */
11628 if (! host_integerp (size, 1))
11631 if (host_integerp (len, 1) || integer_all_onesp (size))
/* Constant LEN larger than the known object SIZE: guaranteed
   overflow, so warn, but still emit the checked call.  */
11635 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11637 warning_at (tree_nonartificial_location (exp),
11638 0, "%Kcall to %D will always overflow destination buffer",
11639 exp, get_callee_fndecl (exp));
11644 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11645 mem{cpy,pcpy,move,set} is available. */
11648 case BUILT_IN_MEMCPY_CHK:
11649 fn = built_in_decls[BUILT_IN_MEMCPY];
11651 case BUILT_IN_MEMPCPY_CHK:
11652 fn = built_in_decls[BUILT_IN_MEMPCPY];
11654 case BUILT_IN_MEMMOVE_CHK:
11655 fn = built_in_decls[BUILT_IN_MEMMOVE];
11657 case BUILT_IN_MEMSET_CHK:
11658 fn = built_in_decls[BUILT_IN_MEMSET];
/* Re-issue the call as the unchecked variant, preserving the
   tail-call flag of the original call expression.  */
11667 fn = build_call_nofold (fn, 3, dest, src, len);
11668 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11669 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11670 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11672 else if (fcode == BUILT_IN_MEMSET_CHK)
11676 unsigned int dest_align
11677 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11679 /* If DEST is not a pointer type, call the normal function. */
11680 if (dest_align == 0)
11683 /* If SRC and DEST are the same (and not volatile), do nothing. */
11684 if (operand_equal_p (src, dest, 0))
11688 if (fcode != BUILT_IN_MEMPCPY_CHK)
11690 /* Evaluate and ignore LEN in case it has side-effects. */
11691 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11692 return expand_expr (dest, target, mode, EXPAND_NORMAL);
/* mempcpy returns DEST + LEN rather than DEST.  */
11695 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11696 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11699 /* __memmove_chk special case. */
11700 if (fcode == BUILT_IN_MEMMOVE_CHK)
11702 unsigned int src_align
11703 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11705 if (src_align == 0)
11708 /* If src is categorized for a readonly section we can use
11709 normal __memcpy_chk. */
11710 if (readonly_data_expr (src))
11712 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11715 fn = build_call_nofold (fn, 4, dest, src, len, size);
11716 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11717 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11718 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11725 /* Emit warning if a buffer overflow is detected at compile time. */
11728 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11732 location_t loc = tree_nonartificial_location (exp);
/* Pick out the (length-ish, object-size) argument pair; its position
   differs per builtin.  */
11736 case BUILT_IN_STRCPY_CHK:
11737 case BUILT_IN_STPCPY_CHK:
11738 /* For __strcat_chk the warning will be emitted only if overflowing
11739 by at least strlen (dest) + 1 bytes. */
11740 case BUILT_IN_STRCAT_CHK:
11741 len = CALL_EXPR_ARG (exp, 1);
11742 size = CALL_EXPR_ARG (exp, 2);
11745 case BUILT_IN_STRNCAT_CHK:
11746 case BUILT_IN_STRNCPY_CHK:
11747 len = CALL_EXPR_ARG (exp, 2);
11748 size = CALL_EXPR_ARG (exp, 3);
11750 case BUILT_IN_SNPRINTF_CHK:
11751 case BUILT_IN_VSNPRINTF_CHK:
11752 len = CALL_EXPR_ARG (exp, 1);
11753 size = CALL_EXPR_ARG (exp, 3);
11756 gcc_unreachable ();
/* An unknown or "unlimited" ((size_t)-1) object size can never be
   proven to overflow; nothing to warn about.  */
11762 if (! host_integerp (size, 1) || integer_all_onesp (size))
/* Here LEN is really a source string; its length must be a known
   constant that is >= SIZE for a guaranteed overflow.  */
11767 len = c_strlen (len, 1);
11768 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11771 else if (fcode == BUILT_IN_STRNCAT_CHK)
11773 tree src = CALL_EXPR_ARG (exp, 1);
11774 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11776 src = c_strlen (src, 1);
11777 if (! src || ! host_integerp (src, 1))
/* Source length unknown: overflow is possible but not certain,
   so use the weaker "might overflow" wording.  */
11779 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11780 exp, get_callee_fndecl (exp));
11783 else if (tree_int_cst_lt (src, size))
11786 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11789 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11790 exp, get_callee_fndecl (exp));
11793 /* Emit warning if a buffer overflow is detected at compile time
11794 in __sprintf_chk/__vsprintf_chk calls. */
11797 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11799 tree size, len, fmt;
11800 const char *fmt_str;
11801 int nargs = call_expr_nargs (exp);
11803 /* Verify the required arguments in the original call. */
11807 size = CALL_EXPR_ARG (exp, 2);
11808 fmt = CALL_EXPR_ARG (exp, 3);
/* Unknown or unlimited ((size_t)-1) object size: cannot warn.  */
11810 if (! host_integerp (size, 1) || integer_all_onesp (size))
11813 /* Check whether the format is a literal string constant. */
11814 fmt_str = c_getstr (fmt);
11815 if (fmt_str == NULL)
11818 if (!init_target_chars ())
11821 /* If the format doesn't contain % args or %%, we know its size. */
11822 if (strchr (fmt_str, target_percent) == 0)
11823 len = build_int_cstu (size_type_node, strlen (fmt_str));
11824 /* If the format is "%s" and first ... argument is a string literal,
11826 else if (fcode == BUILT_IN_SPRINTF_CHK
11827 && strcmp (fmt_str, target_percent_s) == 0)
11833 arg = CALL_EXPR_ARG (exp, 4);
11834 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11837 len = c_strlen (arg, 1);
11838 if (!len || ! host_integerp (len, 1))
/* Output length (excluding NUL) >= object size means the trailing
   NUL is guaranteed to overflow the buffer.  */
11844 if (! tree_int_cst_lt (len, size))
11845 warning_at (tree_nonartificial_location (exp),
11846 0, "%Kcall to %D will always overflow destination buffer",
11847 exp, get_callee_fndecl (exp));
11850 /* Emit warning if a free is called with address of a variable. */
11853 maybe_emit_free_warning (tree exp)
11855 tree arg = CALL_EXPR_ARG (exp, 0);
/* Only a literal &object argument can be diagnosed here.  */
11858 if (TREE_CODE (arg) != ADDR_EXPR)
11861 arg = get_base_address (TREE_OPERAND (arg, 0));
11862 if (arg == NULL || INDIRECT_REF_P (arg))
/* Name the variable in the diagnostic when the base is one.  */
11865 if (SSA_VAR_P (arg))
11866 warning_at (tree_nonartificial_location (exp),
11867 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11869 warning_at (tree_nonartificial_location (exp),
11870 0, "%Kattempt to free a non-heap object", exp);
11873 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11877 fold_builtin_object_size (tree ptr, tree ost)
11879 tree ret = NULL_TREE;
11880 int object_size_type;
11882 if (!validate_arg (ptr, POINTER_TYPE)
11883 || !validate_arg (ost, INTEGER_TYPE))
/* OST must be a constant in [0, 3], mirroring the check done at
   expansion time in expand_builtin_object_size.  */
11888 if (TREE_CODE (ost) != INTEGER_CST
11889 || tree_int_cst_sgn (ost) < 0
11890 || compare_tree_int (ost, 3) > 0)
11893 object_size_type = tree_low_cst (ost, 0);
11895 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11896 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11897 and (size_t) 0 for types 2 and 3. */
11898 if (TREE_SIDE_EFFECTS (ptr))
11899 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11901 if (TREE_CODE (ptr) == ADDR_EXPR)
11902 ret = build_int_cstu (size_type_node,
11903 compute_builtin_object_size (ptr, object_size_type));
11905 else if (TREE_CODE (ptr) == SSA_NAME)
11907 unsigned HOST_WIDE_INT bytes;
11909 /* If object size is not known yet, delay folding until
11910 later. Maybe subsequent passes will help determining
11912 bytes = compute_builtin_object_size (ptr, object_size_type);
11913 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11915 ret = build_int_cstu (size_type_node, bytes);
/* Only return the constant if it actually fits size_t.  */
11920 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11921 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11922 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11929 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11930 DEST, SRC, LEN, and SIZE are the arguments to the call.
11931 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11932 code of the builtin. If MAXLEN is not NULL, it is maximum length
11933 passed as third argument. */
11936 fold_builtin_memory_chk (location_t loc, tree fndecl,
11937 tree dest, tree src, tree len, tree size,
11938 tree maxlen, bool ignore,
11939 enum built_in_function fcode)
/* For memset_chk the second argument is the fill value, not a
   pointer.  */
11943 if (!validate_arg (dest, POINTER_TYPE)
11944 || !validate_arg (src,
11945 (fcode == BUILT_IN_MEMSET_CHK
11946 ? INTEGER_TYPE : POINTER_TYPE))
11947 || !validate_arg (len, INTEGER_TYPE)
11948 || !validate_arg (size, INTEGER_TYPE))
11951 /* If SRC and DEST are the same (and not volatile), return DEST
11952 (resp. DEST+LEN for __mempcpy_chk). */
11953 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11955 if (fcode != BUILT_IN_MEMPCPY_CHK)
11956 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
11960 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
11962 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
/* Need SIZE to be a known constant to prove anything.  */
11966 if (! host_integerp (size, 1))
11969 if (! integer_all_onesp (size))
11971 if (! host_integerp (len, 1))
11973 /* If LEN is not constant, try MAXLEN too.
11974 For MAXLEN only allow optimizing into non-_ocs function
11975 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11976 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11978 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11980 /* (void) __mempcpy_chk () can be optimized into
11981 (void) __memcpy_chk (). */
11982 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11986 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
/* SIZE < MAXLEN: the copy may exceed the object; keep the check.  */
11994 if (tree_int_cst_lt (size, maxlen))
11999 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12000 mem{cpy,pcpy,move,set} is available. */
12003 case BUILT_IN_MEMCPY_CHK:
12004 fn = built_in_decls[BUILT_IN_MEMCPY];
12006 case BUILT_IN_MEMPCPY_CHK:
12007 fn = built_in_decls[BUILT_IN_MEMPCPY];
12009 case BUILT_IN_MEMMOVE_CHK:
12010 fn = built_in_decls[BUILT_IN_MEMMOVE];
12012 case BUILT_IN_MEMSET_CHK:
12013 fn = built_in_decls[BUILT_IN_MEMSET];
12022 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12025 /* Fold a call to the __st[rp]cpy_chk builtin.
12026 DEST, SRC, and SIZE are the arguments to the call.
12027 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12028 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12029 strings passed as second argument. */
12032 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12033 tree src, tree size,
12034 tree maxlen, bool ignore,
12035 enum built_in_function fcode)
12039 if (!validate_arg (dest, POINTER_TYPE)
12040 || !validate_arg (src, POINTER_TYPE)
12041 || !validate_arg (size, INTEGER_TYPE))
12044 /* If SRC and DEST are the same (and not volatile), return DEST. */
12045 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12046 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
/* SIZE must be a known constant to prove the copy fits.  */
12048 if (! host_integerp (size, 1))
12051 if (! integer_all_onesp (size))
12053 len = c_strlen (src, 1);
12054 if (! len || ! host_integerp (len, 1))
12056 /* If LEN is not constant, try MAXLEN too.
12057 For MAXLEN only allow optimizing into non-_ocs function
12058 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12059 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12061 if (fcode == BUILT_IN_STPCPY_CHK)
12066 /* If return value of __stpcpy_chk is ignored,
12067 optimize into __strcpy_chk. */
12068 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12072 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12075 if (! len || TREE_SIDE_EFFECTS (len))
12078 /* If c_strlen returned something, but not a constant,
12079 transform __strcpy_chk into __memcpy_chk. */
12080 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
/* Copy strlen (SRC) + 1 bytes to include the terminating NUL.  */
12084 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12085 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12086 build_call_expr_loc (loc, fn, 4,
12087 dest, src, len, size));
/* MAXLEN >= SIZE: overflow possible, keep the checked variant.  */
12093 if (! tree_int_cst_lt (maxlen, size))
12097 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12098 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12099 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12103 return build_call_expr_loc (loc, fn, 2, dest, src);
12106 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12107 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12108 length passed as third argument. */
12111 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12112 tree len, tree size, tree maxlen)
12116 if (!validate_arg (dest, POINTER_TYPE)
12117 || !validate_arg (src, POINTER_TYPE)
12118 || !validate_arg (len, INTEGER_TYPE)
12119 || !validate_arg (size, INTEGER_TYPE))
/* SIZE must be a known constant to compare against LEN/MAXLEN.  */
12122 if (! host_integerp (size, 1))
12125 if (! integer_all_onesp (size))
12127 if (! host_integerp (len, 1))
12129 /* If LEN is not constant, try MAXLEN too.
12130 For MAXLEN only allow optimizing into non-_ocs function
12131 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12132 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
/* SIZE < bound: possible overflow, keep the checked call.  */
12138 if (tree_int_cst_lt (size, maxlen))
12142 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12143 fn = built_in_decls[BUILT_IN_STRNCPY];
12147 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12150 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12151 are the arguments to the call. */
12154 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12155 tree src, tree size)
12160 if (!validate_arg (dest, POINTER_TYPE)
12161 || !validate_arg (src, POINTER_TYPE)
12162 || !validate_arg (size, INTEGER_TYPE))
12165 p = c_getstr (src);
12166 /* If the SRC parameter is "", return DEST. */
12167 if (p && *p == '\0')
12168 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
/* Only drop the check when SIZE is the "unlimited" (size_t)-1
   sentinel; strcat cannot be bounds-proven otherwise.  */
12170 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12173 /* If __builtin_strcat_chk is used, assume strcat is available. */
12174 fn = built_in_decls[BUILT_IN_STRCAT];
12178 return build_call_expr_loc (loc, fn, 2, dest, src);
12181 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12185 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12186 tree dest, tree src, tree len, tree size)
/* Fixed: the original validated SIZE twice and never validated LEN;
   validate each argument once, in parameter order.  */
12191 if (!validate_arg (dest, POINTER_TYPE)
12192 || !validate_arg (src, POINTER_TYPE)
12193 || !validate_arg (len, INTEGER_TYPE)
12194 || !validate_arg (size, INTEGER_TYPE))
12197 p = c_getstr (src);
12198 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12199 if (p && *p == '\0')
12200 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len)
12201 else if (integer_zerop (len))
12202 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
/* SIZE must be a known constant to simplify further.  */
12204 if (! host_integerp (size, 1))
12207 if (! integer_all_onesp (size))
12209 tree src_len = c_strlen (src, 1);
12211 && host_integerp (src_len, 1)
12212 && host_integerp (len, 1)
12213 && ! tree_int_cst_lt (len, src_len))
12215 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12216 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12220 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12225 /* If __builtin_strncat_chk is used, assume strncat is available. */
12226 fn = built_in_decls[BUILT_IN_STRNCAT];
12230 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12233 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12234 a normal call should be emitted rather than expanding the function
12235 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12238 fold_builtin_sprintf_chk (location_t loc, tree exp,
12239 enum built_in_function fcode)
12241 tree dest, size, len, fn, fmt, flag;
12242 const char *fmt_str;
12243 int nargs = call_expr_nargs (exp);
12245 /* Verify the required arguments in the original call. */
12248 dest = CALL_EXPR_ARG (exp, 0);
12249 if (!validate_arg (dest, POINTER_TYPE))
12251 flag = CALL_EXPR_ARG (exp, 1);
12252 if (!validate_arg (flag, INTEGER_TYPE))
12254 size = CALL_EXPR_ARG (exp, 2);
12255 if (!validate_arg (size, INTEGER_TYPE))
12257 fmt = CALL_EXPR_ARG (exp, 3);
12258 if (!validate_arg (fmt, POINTER_TYPE))
/* SIZE must be a known constant to compare against the output
   length computed below.  */
12261 if (! host_integerp (size, 1))
12266 if (!init_target_chars ())
12269 /* Check whether the format is a literal string constant. */
12270 fmt_str = c_getstr (fmt);
12271 if (fmt_str != NULL)
12273 /* If the format doesn't contain % args or %%, we know the size. */
12274 if (strchr (fmt_str, target_percent) == 0)
12276 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12277 len = build_int_cstu (size_type_node, strlen (fmt_str));
12279 /* If the format is "%s" and first ... argument is a string literal,
12280 we know the size too. */
12281 else if (fcode == BUILT_IN_SPRINTF_CHK
12282 && strcmp (fmt_str, target_percent_s) == 0)
12288 arg = CALL_EXPR_ARG (exp, 4);
12289 if (validate_arg (arg, POINTER_TYPE))
12291 len = c_strlen (arg, 1);
12292 if (! len || ! host_integerp (len, 1))
/* Without a proven LEN < SIZE we must keep the checked call.  */
12299 if (! integer_all_onesp (size))
12301 if (! len || ! tree_int_cst_lt (len, size))
12305 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12306 or if format doesn't contain % chars or is "%s". */
12307 if (! integer_zerop (flag))
12309 if (fmt_str == NULL)
12311 if (strchr (fmt_str, target_percent) != NULL
12312 && strcmp (fmt_str, target_percent_s))
12316 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12317 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12318 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
/* Rebuild the call dropping FLAG and SIZE: (dest, fmt, ...).  */
12322 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12325 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12326 a normal call should be emitted rather than expanding the function
12327 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12328 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12329 passed as second argument. */
12332 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12333 enum built_in_function fcode)
12335 tree dest, size, len, fn, fmt, flag;
12336 const char *fmt_str;
12338 /* Verify the required arguments in the original call. */
12339 if (call_expr_nargs (exp) < 5)
12341 dest = CALL_EXPR_ARG (exp, 0);
12342 if (!validate_arg (dest, POINTER_TYPE))
12344 len = CALL_EXPR_ARG (exp, 1);
12345 if (!validate_arg (len, INTEGER_TYPE))
12347 flag = CALL_EXPR_ARG (exp, 2);
12348 if (!validate_arg (flag, INTEGER_TYPE))
12350 size = CALL_EXPR_ARG (exp, 3);
12351 if (!validate_arg (size, INTEGER_TYPE))
12353 fmt = CALL_EXPR_ARG (exp, 4);
12354 if (!validate_arg (fmt, POINTER_TYPE))
/* SIZE must be a known constant to reason about the bound.  */
12357 if (! host_integerp (size, 1))
12360 if (! integer_all_onesp (size))
12362 if (! host_integerp (len, 1))
12364 /* If LEN is not constant, try MAXLEN too.
12365 For MAXLEN only allow optimizing into non-_ocs function
12366 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12367 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
/* SIZE < bound: keep the checked variant.  */
12373 if (tree_int_cst_lt (size, maxlen))
12377 if (!init_target_chars ())
12380 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12381 or if format doesn't contain % chars or is "%s". */
12382 if (! integer_zerop (flag))
12384 fmt_str = c_getstr (fmt);
12385 if (fmt_str == NULL)
12387 if (strchr (fmt_str, target_percent) != NULL
12388 && strcmp (fmt_str, target_percent_s))
12392 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12394 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12395 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
/* Rebuild the call dropping FLAG and SIZE: (dest, len, fmt, ...).  */
12399 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12402 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12403 FMT and ARG are the arguments to the call; we don't fold cases with
12404 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12406 Return NULL_TREE if no simplification was possible, otherwise return the
12407 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12408 code of the function to be simplified. */
12411 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12412 tree arg, bool ignore,
12413 enum built_in_function fcode)
12415 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12416 const char *fmt_str = NULL;
12418 /* If the return value is used, don't do the transformation. */
12422 /* Verify the required arguments in the original call. */
12423 if (!validate_arg (fmt, POINTER_TYPE))
12426 /* Check whether the format is a literal string constant. */
12427 fmt_str = c_getstr (fmt);
12428 if (fmt_str == NULL)
12431 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12433 /* If we're using an unlocked function, assume the other
12434 unlocked functions exist explicitly. */
12435 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12436 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12440 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12441 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12444 if (!init_target_chars ())
12447 if (strcmp (fmt_str, target_percent_s) == 0
12448 || strchr (fmt_str, target_percent) == NULL)
12452 if (strcmp (fmt_str, target_percent_s) == 0)
/* The va_list variants cannot inspect their variadic argument.  */
12454 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12457 if (!arg || !validate_arg (arg, POINTER_TYPE))
12460 str = c_getstr (arg);
12466 /* The format specifier doesn't contain any '%' characters. */
12467 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12473 /* If the string was "", printf does nothing. */
12474 if (str[0] == '\0')
12475 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12477 /* If the string has length of 1, call putchar. */
12478 if (str[1] == '\0')
12480 /* Given printf("c"), (where c is any one character,)
12481 convert "c"[0] to an int and pass that to the replacement
12483 newarg = build_int_cst (NULL_TREE, str[0]);
12485 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12489 /* If the string was "string\n", call puts("string"). */
12490 size_t len = strlen (str);
12491 if ((unsigned char)str[len - 1] == target_newline)
12493 /* Create a NUL-terminated string that's one char shorter
12494 than the original, stripping off the trailing '\n'. */
12495 char *newstr = XALLOCAVEC (char, len);
12496 memcpy (newstr, str, len - 1);
12497 newstr[len - 1] = 0;
12499 newarg = build_string_literal (len, newstr);
12501 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12504 /* We'd like to arrange to call fputs(string,stdout) here,
12505 but we need stdout and don't have a way to get it yet. */
12510 /* The other optimizations can be done only on the non-va_list variants. */
12511 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12514 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12515 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12517 if (!arg || !validate_arg (arg, POINTER_TYPE))
12520 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12523 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12524 else if (strcmp (fmt_str, target_percent_c) == 0)
12526 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12529 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
/* Convert the replacement's result to printf's declared type.  */
12535 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12538 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12539 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12540 more than 3 arguments, and ARG may be null in the 2-argument case.
12542 Return NULL_TREE if no simplification was possible, otherwise return the
12543 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12544 code of the function to be simplified. */
12547 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12548 tree fmt, tree arg, bool ignore,
12549 enum built_in_function fcode)
12551 tree fn_fputc, fn_fputs, call = NULL_TREE;
12552 const char *fmt_str = NULL;
12554 /* If the return value is used, don't do the transformation. */
12558 /* Verify the required arguments in the original call. */
12559 if (!validate_arg (fp, POINTER_TYPE))
12561 if (!validate_arg (fmt, POINTER_TYPE))
12564 /* Check whether the format is a literal string constant. */
12565 fmt_str = c_getstr (fmt);
12566 if (fmt_str == NULL)
12569 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12571 /* If we're using an unlocked function, assume the other
12572 unlocked functions exist explicitly. */
12573 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12574 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12578 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12579 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12582 if (!init_target_chars ())
12585 /* If the format doesn't contain % args or %%, use strcpy. */
12586 if (strchr (fmt_str, target_percent) == NULL)
12588 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12592 /* If the format specifier was "", fprintf does nothing. */
12593 if (fmt_str[0] == '\0')
12595 /* If FP has side-effects, just wait until gimplification is
12597 if (TREE_SIDE_EFFECTS (fp))
12600 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12603 /* When "string" doesn't contain %, replace all cases of
12604 fprintf (fp, string) with fputs (string, fp). The fputs
12605 builtin will take care of special cases like length == 1. */
12607 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12610 /* The other optimizations can be done only on the non-va_list variants. */
12611 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12614 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12615 else if (strcmp (fmt_str, target_percent_s) == 0)
12617 if (!arg || !validate_arg (arg, POINTER_TYPE))
12620 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12623 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12624 else if (strcmp (fmt_str, target_percent_c) == 0)
12626 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12629 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
/* Convert the replacement's result to fprintf's declared type.  */
12634 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12637 /* Initialize format string characters in the target charset. */
12640 init_target_chars (void)
/* Map the host characters used by the printf folders ('\n', '%',
   'c', 's') into the target's execution character set.  */
12645 target_newline = lang_hooks.to_target_charset ('\n');
12646 target_percent = lang_hooks.to_target_charset ('%');
12647 target_c = lang_hooks.to_target_charset ('c');
12648 target_s = lang_hooks.to_target_charset ('s');
/* A zero mapping means the target charset lacks the character.  */
12649 if (target_newline == 0 || target_percent == 0 || target_c == 0
/* Pre-build the "%c", "%s" and "%s\n" comparison strings.  */
12653 target_percent_c[0] = target_percent;
12654 target_percent_c[1] = target_c;
12655 target_percent_c[2] = '\0';
12657 target_percent_s[0] = target_percent;
12658 target_percent_s[1] = target_s;
12659 target_percent_s[2] = '\0';
12661 target_percent_s_newline[0] = target_percent;
12662 target_percent_s_newline[1] = target_s;
12663 target_percent_s_newline[2] = target_newline;
12664 target_percent_s_newline[3] = '\0';
12671 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12672 and no overflow/underflow occurred. INEXACT is true if M was not
12673 exactly calculated. TYPE is the tree type for the result. This
12674 function assumes that you cleared the MPFR flags and then
12675 calculated M to see if anything subsequently set a flag prior to
12676 entering this function. Return NULL_TREE if any checks fail. */
12679 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12681 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12682 overflow/underflow occurred. If -frounding-math, proceed iff the
12683 result of calling FUNC was exact. */
12684 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12685 && (!flag_rounding_math || !inexact))
12687 REAL_VALUE_TYPE rr;
12689 real_from_mpfr (&rr, m, type, GMP_RNDN);
12690 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12691 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12692 but the mpft_t is not, then we underflowed in the
12694 if (real_isfinite (&rr)
12695 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12697 REAL_VALUE_TYPE rmode;
/* Round-trip through TYPE's machine mode; the fold is only valid
   if the value survives unchanged.  */
12699 real_convert (&rmode, TYPE_MODE (type), &rr);
12700 /* Proceed iff the specified mode can hold the value. */
12701 if (real_identical (&rmode, &rr))
12702 return build_real (type, rmode);
12708 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12709 number and no overflow/underflow occurred. INEXACT is true if M
12710 was not exactly calculated. TYPE is the tree type for the result.
12711 This function assumes that you cleared the MPFR flags and then
12712 calculated M to see if anything subsequently set a flag prior to
12713 entering this function. Return NULL_TREE if any checks fail, if
12714 FORCE_CONVERT is true, then bypass the checks. */
12717 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
/* NOTE(review): the first operand of each of the three conditions below
   (presumably "if (force_convert ..." tests that bypass the checks, per
   the block comment above) is elided in this copy -- the dangling
   "|| (..." lines are the second operand.  Verify against upstream.  */
12719 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12720 overflow/underflow occurred. If -frounding-math, proceed iff the
12721 result of calling FUNC was exact. */
12723 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12724 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12725 && (!flag_rounding_math || !inexact)))
12727 REAL_VALUE_TYPE re, im;
/* TYPE is a complex type; its element type is TREE_TYPE (type).  */
12729 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12730 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12731 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12732 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12733 but the mpft_t is not, then we underflowed in the
12736 || (real_isfinite (&re) && real_isfinite (&im)
12737 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12738 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12740 REAL_VALUE_TYPE re_mode, im_mode;
/* Round-trip both parts through the element mode and require an exact
   representation (unless the elided force_convert test short-circuits).  */
12742 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12743 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12744 /* Proceed iff the specified mode can hold the value. */
12746 || (real_identical (&re_mode, &re)
12747 && real_identical (&im_mode, &im)))
12748 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12749 build_real (TREE_TYPE (type), im_mode));
12755 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12756 FUNC on it and return the resulting value as a tree with type TYPE.
12757 If MIN and/or MAX are not NULL, then the supplied ARG must be
12758 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12759 acceptable values, otherwise they are not. The mpfr precision is
12760 set to the precision of TYPE. We assume that function FUNC returns
12761 zero if the result could be calculated exactly within the requested
12765 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12766 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
/* NOTE(review): the trailing "bool inclusive)" parameter, the "static
   tree" return type, the mpfr_t/int declarations, the mpfr_clear call
   and the "return result;" are elided in this copy -- verify upstream.  */
12769 tree result = NULL_TREE;
12773 /* To proceed, MPFR must exactly represent the target floating point
12774 format, which only happens when the target base equals two. */
12775 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12776 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12778 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
/* Enforce the caller-supplied domain: MIN/MAX bounds, inclusive or not.  */
12780 if (real_isfinite (ra)
12781 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12782 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12784 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12785 const int prec = fmt->p;
/* Match MPFR's rounding mode to the target format's rounding.  */
12786 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12790 mpfr_init2 (m, prec);
12791 mpfr_from_real (m, ra, GMP_RNDN);
/* Clear flags so do_mpfr_ckconv can detect overflow/underflow set
   by FUNC alone.  */
12792 mpfr_clear_flags ();
12793 inexact = func (m, m, rnd);
12794 result = do_mpfr_ckconv (m, type, inexact);
12802 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12803 FUNC on it and return the resulting value as a tree with type TYPE.
12804 The mpfr precision is set to the precision of TYPE. We assume that
12805 function FUNC returns zero if the result could be calculated
12806 exactly within the requested precision. */
12809 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12810 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
/* NOTE(review): the "static tree" return type, braces, mpfr_t/int
   declarations and the final "return result;" are elided in this copy.  */
12812 tree result = NULL_TREE;
12817 /* To proceed, MPFR must exactly represent the target floating point
12818 format, which only happens when the target base equals two. */
12819 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12820 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12821 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12823 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12824 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12826 if (real_isfinite (ra1) && real_isfinite (ra2))
12828 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12829 const int prec = fmt->p;
12830 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
/* Both operands share the precision of TYPE; m1 doubles as the result.  */
12834 mpfr_inits2 (prec, m1, m2, NULL);
12835 mpfr_from_real (m1, ra1, GMP_RNDN);
12836 mpfr_from_real (m2, ra2, GMP_RNDN);
12837 mpfr_clear_flags ();
12838 inexact = func (m1, m1, m2, rnd);
12839 result = do_mpfr_ckconv (m1, type, inexact);
12840 mpfr_clears (m1, m2, NULL);
12847 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12848 FUNC on it and return the resulting value as a tree with type TYPE.
12849 The mpfr precision is set to the precision of TYPE. We assume that
12850 function FUNC returns zero if the result could be calculated
12851 exactly within the requested precision. */
12854 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12855 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
/* NOTE(review): return type, braces, mpfr_t/int declarations and the
   final "return result;" are elided in this copy -- verify upstream.
   Structure parallels do_mpfr_arg2 with a third operand (e.g. fma).  */
12857 tree result = NULL_TREE;
12863 /* To proceed, MPFR must exactly represent the target floating point
12864 format, which only happens when the target base equals two. */
12865 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12866 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12867 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12868 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12870 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12871 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12872 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12874 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12876 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12877 const int prec = fmt->p;
12878 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12882 mpfr_inits2 (prec, m1, m2, m3, NULL);
12883 mpfr_from_real (m1, ra1, GMP_RNDN);
12884 mpfr_from_real (m2, ra2, GMP_RNDN);
12885 mpfr_from_real (m3, ra3, GMP_RNDN);
12886 mpfr_clear_flags ();
12887 inexact = func (m1, m1, m2, m3, rnd);
12888 result = do_mpfr_ckconv (m1, type, inexact);
12889 mpfr_clears (m1, m2, m3, NULL);
12896 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12897 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12898 If ARG_SINP and ARG_COSP are NULL then the result is returned
12899 as a complex value.
12900 The type is taken from the type of ARG and is used for setting the
12901 precision of the calculation and results. */
12904 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
/* NOTE(review): return type, braces, mpfr_t/int declarations, the
   fold_convert operands of the two MODIFY_EXPRs (original lines 12951,
   12954) and "return result;" are elided in this copy of the file.  */
12906 tree const type = TREE_TYPE (arg);
12907 tree result = NULL_TREE;
12911 /* To proceed, MPFR must exactly represent the target floating point
12912 format, which only happens when the target base equals two. */
12913 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12914 && TREE_CODE (arg) == REAL_CST
12915 && !TREE_OVERFLOW (arg))
12917 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12919 if (real_isfinite (ra))
12921 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12922 const int prec = fmt->p;
12923 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12924 tree result_s, result_c;
/* One shared mpfr_sin_cos call produces both results; both must
   convert cleanly for any folding to happen.  */
12928 mpfr_inits2 (prec, m, ms, mc, NULL);
12929 mpfr_from_real (m, ra, GMP_RNDN);
12930 mpfr_clear_flags ();
12931 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12932 result_s = do_mpfr_ckconv (ms, type, inexact);
12933 result_c = do_mpfr_ckconv (mc, type, inexact);
12934 mpfr_clears (m, ms, mc, NULL);
12935 if (result_s && result_c)
12937 /* If we are to return in a complex value do so. */
12938 if (!arg_sinp && !arg_cosp)
/* cexpi-style result: real part is cos, imaginary part is sin.  */
12939 return build_complex (build_complex_type (type),
12940 result_c, result_s);
12942 /* Dereference the sin/cos pointer arguments. */
12943 arg_sinp = build_fold_indirect_ref (arg_sinp);
12944 arg_cosp = build_fold_indirect_ref (arg_cosp);
12945 /* Proceed if valid pointer type were passed in. */
12946 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12947 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12949 /* Set the values. */
12950 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12952 TREE_SIDE_EFFECTS (result_s) = 1;
12953 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12955 TREE_SIDE_EFFECTS (result_c) = 1;
12956 /* Combine the assignments into a compound expr. */
12957 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12958 result_s, result_c));
12966 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12967 two-argument mpfr order N Bessel function FUNC on them and return
12968 the resulting value as a tree with type TYPE. The mpfr precision
12969 is set to the precision of TYPE. We assume that function FUNC
12970 returns zero if the result could be calculated exactly within the
12971 requested precision. */
12973 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12974 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12975 const REAL_VALUE_TYPE *min, bool inclusive)
/* NOTE(review): return type, braces, mpfr_t/int declarations, the first
   operand of the truncated condition at 12991 (presumably a check that
   N fits a host long), mpfr_clear and "return result;" are elided.  */
12977 tree result = NULL_TREE;
12982 /* To proceed, MPFR must exactly represent the target floating point
12983 format, which only happens when the target base equals two. */
12984 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12985 && host_integerp (arg1, 0)
12986 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12988 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12989 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12992 && real_isfinite (ra)
12993 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12995 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12996 const int prec = fmt->p;
12997 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13001 mpfr_init2 (m, prec);
13002 mpfr_from_real (m, ra, GMP_RNDN);
13003 mpfr_clear_flags ();
/* FUNC is mpfr_jn/mpfr_yn-style: order N, then the real argument.  */
13004 inexact = func (m, n, m, rnd);
13005 result = do_mpfr_ckconv (m, type, inexact);
13013 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13014 the pointer *(ARG_QUO) and return the result. The type is taken
13015 from the type of ARG0 and is used for setting the precision of the
13016 calculation and results. */
13019 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
/* NOTE(review): return type, braces, declarations of m0/m1/result_rem/
   integer_quo, the "if (result_rem)" guard around the quo handling, and
   "return result;" are elided in this copy -- verify upstream.  */
13021 tree const type = TREE_TYPE (arg0);
13022 tree result = NULL_TREE;
13027 /* To proceed, MPFR must exactly represent the target floating point
13028 format, which only happens when the target base equals two. */
13029 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13030 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13031 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13033 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13034 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13036 if (real_isfinite (ra0) && real_isfinite (ra1))
13038 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13039 const int prec = fmt->p;
13040 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13045 mpfr_inits2 (prec, m0, m1, NULL);
13046 mpfr_from_real (m0, ra0, GMP_RNDN);
13047 mpfr_from_real (m1, ra1, GMP_RNDN);
13048 mpfr_clear_flags ();
13049 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13050 /* Remquo is independent of the rounding mode, so pass
13051 inexact=0 to do_mpfr_ckconv(). */
13052 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13053 mpfr_clears (m0, m1, NULL);
13056 /* MPFR calculates quo in the host's long so it may
13057 return more bits in quo than the target int can hold
13058 if sizeof(host long) > sizeof(target int). This can
13059 happen even for native compilers in LP64 mode. In
13060 these cases, modulo the quo value with the largest
13061 number that the target int can hold while leaving one
13062 bit for the sign. */
13063 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13064 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13066 /* Dereference the quo pointer argument. */
13067 arg_quo = build_fold_indirect_ref (arg_quo);
13068 /* Proceed iff a valid pointer type was passed in. */
13069 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13071 /* Set the value. */
13072 tree result_quo = fold_build2 (MODIFY_EXPR,
13073 TREE_TYPE (arg_quo), arg_quo,
13074 build_int_cst (NULL, integer_quo))
13075 TREE_SIDE_EFFECTS (result_quo) = 1;
13076 /* Combine the quo assignment with the rem. */
13077 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13078 result_quo, result_rem));
13086 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13087 resulting value as a tree with type TYPE. The mpfr precision is
13088 set to the precision of TYPE. We assume that this mpfr function
13089 returns zero if the result could be calculated exactly within the
13090 requested precision. In addition, the integer pointer represented
13091 by ARG_SG will be dereferenced and set to the appropriate signgam
13095 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
/* NOTE(review): return type, braces, declarations of m/sg/inexact/
   result_lg/result_sg, mpfr_clear, the "if (result_lg)" guard and
   "return result;" are elided in this copy -- verify upstream.  */
13097 tree result = NULL_TREE;
13101 /* To proceed, MPFR must exactly represent the target floating point
13102 format, which only happens when the target base equals two. Also
13103 verify ARG is a constant and that ARG_SG is an int pointer. */
13104 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13105 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13106 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13107 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13109 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13111 /* In addition to NaN and Inf, the argument cannot be zero or a
13112 negative integer. */
/* lgamma has poles at 0 and the negative integers, hence the extra
   domain test below.  */
13113 if (real_isfinite (ra)
13114 && ra->cl != rvc_zero
13115 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13117 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13118 const int prec = fmt->p;
13119 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13124 mpfr_init2 (m, prec);
13125 mpfr_from_real (m, ra, GMP_RNDN);
13126 mpfr_clear_flags ();
13127 inexact = mpfr_lgamma (m, &sg, m, rnd);
13128 result_lg = do_mpfr_ckconv (m, type, inexact);
13134 /* Dereference the arg_sg pointer argument. */
13135 arg_sg = build_fold_indirect_ref (arg_sg);
13136 /* Assign the signgam value into *arg_sg. */
13137 result_sg = fold_build2 (MODIFY_EXPR,
13138 TREE_TYPE (arg_sg), arg_sg,
13139 build_int_cst (NULL, sg));
13140 TREE_SIDE_EFFECTS (result_sg) = 1;
13141 /* Combine the signgam assignment with the lgamma result. */
13142 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13143 result_sg, result_lg));
13151 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13152 function FUNC on it and return the resulting value as a tree with
13153 type TYPE. The mpfr precision is set to the precision of TYPE. We
13154 assume that function FUNC returns zero if the result could be
13155 calculated exactly within the requested precision. */
13158 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
/* NOTE(review): return type ("static tree", forward-declared near the
   top of the file), braces, mpc_t/int declarations, mpc_clear and
   "return result;" are elided in this copy -- verify upstream.  */
13160 tree result = NULL_TREE;
13164 /* To proceed, MPFR must exactly represent the target floating point
13165 format, which only happens when the target base equals two. */
13166 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13167 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13168 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13170 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13171 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13173 if (real_isfinite (re) && real_isfinite (im))
13175 const struct real_format *const fmt =
13176 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13177 const int prec = fmt->p;
/* MPC needs a rounding mode per component; RNDZZ/RNDNN apply the same
   mode to both the real and imaginary parts.  */
13178 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13179 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13183 mpc_init2 (m, prec);
13184 mpfr_from_real (mpc_realref(m), re, rnd);
13185 mpfr_from_real (mpc_imagref(m), im, rnd);
13186 mpfr_clear_flags ();
13187 inexact = func (m, m, crnd);
13188 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13196 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13197 mpc function FUNC on it and return the resulting value as a tree
13198 with type TYPE. The mpfr precision is set to the precision of
13199 TYPE. We assume that function FUNC returns zero if the result
13200 could be calculated exactly within the requested precision. If
13201 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13202 in the arguments and/or results. */
13205 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13206 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
/* NOTE(review): return type, braces, mpc_t/int declarations, the first
   operand of the truncated finiteness condition at 13226 (presumably
   "if (do_nonfinite"), mpc_clear calls and "return result;" are elided
   in this copy -- verify against upstream builtins.c.  */
13208 tree result = NULL_TREE;
13213 /* To proceed, MPFR must exactly represent the target floating point
13214 format, which only happens when the target base equals two. */
13215 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13216 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13217 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13218 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13219 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13221 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13222 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13223 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13224 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13227 || (real_isfinite (re0) && real_isfinite (im0)
13228 && real_isfinite (re1) && real_isfinite (im1)))
13230 const struct real_format *const fmt =
13231 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13232 const int prec = fmt->p;
13233 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13234 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13238 mpc_init2 (m0, prec);
13239 mpc_init2 (m1, prec);
13240 mpfr_from_real (mpc_realref(m0), re0, rnd);
13241 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13242 mpfr_from_real (mpc_realref(m1), re1, rnd);
13243 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13244 mpfr_clear_flags ();
13245 inexact = func (m0, m0, m1, crnd);
/* DO_NONFINITE doubles as force_convert: fold even when the result
   contains Inf/NaN.  */
13246 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13256 The functions below provide an alternate interface for folding
13257 builtin function calls presented as GIMPLE_CALL statements rather
13258 than as CALL_EXPRs. The folded result is still expressed as a
13259 tree. There is too much code duplication in the handling of
13260 varargs functions, and a more intrusive re-factoring would permit
13261 better sharing of code between the tree and statement-based
13262 versions of these functions. */
13264 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13265 along with N new arguments specified as the "..." parameters. SKIP
13266 is the number of arguments in STMT to be omitted. This function is used
13267 to do varargs-to-varargs transformations. */
13270 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
/* NOTE(review): the "static tree" return type, the declarations of
   buffer/i/j/ap and the va_start/va_end pair around the first loop are
   elided in this copy -- verify against upstream before editing.  */
13272 int oldnargs = gimple_call_num_args (stmt);
13273 int nargs = oldnargs - skip + n;
13274 tree fntype = TREE_TYPE (fndecl);
13275 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13279 location_t loc = gimple_location (stmt);
13281 buffer = XALLOCAVEC (tree, nargs);
/* New arguments from the "..." list come first ...  */
13283 for (i = 0; i < n; i++)
13284 buffer[i] = va_arg (ap, tree);
/* ... followed by the tail of STMT's arguments past SKIP.  */
13286 for (j = skip; j < oldnargs; j++, i++)
13287 buffer[i] = gimple_call_arg (stmt, j);
13289 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13292 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13293 a normal call should be emitted rather than expanding the function
13294 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13297 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
/* NOTE(review): the "static tree" return type, braces, the argument-count
   check, the "return NULL_TREE;" lines after each failed validate_arg,
   the "len = NULL_TREE;" initialization and the fn NULL check are elided
   in this copy -- verify against upstream builtins.c.  */
13299 tree dest, size, len, fn, fmt, flag;
13300 const char *fmt_str;
13301 int nargs = gimple_call_num_args (stmt);
13303 /* Verify the required arguments in the original call. */
13306 dest = gimple_call_arg (stmt, 0);
13307 if (!validate_arg (dest, POINTER_TYPE))
13309 flag = gimple_call_arg (stmt, 1);
13310 if (!validate_arg (flag, INTEGER_TYPE))
13312 size = gimple_call_arg (stmt, 2);
13313 if (!validate_arg (size, INTEGER_TYPE))
13315 fmt = gimple_call_arg (stmt, 3);
13316 if (!validate_arg (fmt, POINTER_TYPE))
/* SIZE must be a known compile-time constant to compare against LEN.  */
13319 if (! host_integerp (size, 1))
13324 if (!init_target_chars ())
13327 /* Check whether the format is a literal string constant. */
13328 fmt_str = c_getstr (fmt);
13329 if (fmt_str != NULL)
13331 /* If the format doesn't contain % args or %%, we know the size. */
13332 if (strchr (fmt_str, target_percent) == 0)
13334 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13335 len = build_int_cstu (size_type_node, strlen (fmt_str));
13337 /* If the format is "%s" and first ... argument is a string literal,
13338 we know the size too. */
13339 else if (fcode == BUILT_IN_SPRINTF_CHK
13340 && strcmp (fmt_str, target_percent_s) == 0)
13346 arg = gimple_call_arg (stmt, 4);
13347 if (validate_arg (arg, POINTER_TYPE))
13349 len = c_strlen (arg, 1);
13350 if (! len || ! host_integerp (len, 1))
/* size == (size_t)-1 means "unknown object size": skip the bound check.  */
13357 if (! integer_all_onesp (size))
13359 if (! len || ! tree_int_cst_lt (len, size))
13363 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13364 or if format doesn't contain % chars or is "%s". */
13365 if (! integer_zerop (flag))
13367 if (fmt_str == NULL)
13369 if (strchr (fmt_str, target_percent) != NULL
13370 && strcmp (fmt_str, target_percent_s))
13374 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13375 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13376 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF]
/* Drop the flag and size arguments, keeping dest/fmt plus the varargs.  */
13380 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13383 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13384 a normal call should be emitted rather than expanding the function
13385 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13386 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13387 passed as second argument. */
13390 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13391 enum built_in_function fcode)
/* NOTE(review): the "static tree" return type, braces and the
   "return NULL_TREE;" lines after each failed check are elided in this
   copy -- verify against upstream builtins.c.  */
13393 tree dest, size, len, fn, fmt, flag;
13394 const char *fmt_str;
13396 /* Verify the required arguments in the original call. */
13397 if (gimple_call_num_args (stmt) < 5)
13399 dest = gimple_call_arg (stmt, 0);
13400 if (!validate_arg (dest, POINTER_TYPE))
13402 len = gimple_call_arg (stmt, 1);
13403 if (!validate_arg (len, INTEGER_TYPE))
13405 flag = gimple_call_arg (stmt, 2);
13406 if (!validate_arg (flag, INTEGER_TYPE))
13408 size = gimple_call_arg (stmt, 3);
13409 if (!validate_arg (size, INTEGER_TYPE))
13411 fmt = gimple_call_arg (stmt, 4);
13412 if (!validate_arg (fmt, POINTER_TYPE))
13415 if (! host_integerp (size, 1))
/* size == (size_t)-1 means "unknown object size": skip the bound check.  */
13418 if (! integer_all_onesp (size))
13420 if (! host_integerp (len, 1))
13422 /* If LEN is not constant, try MAXLEN too.
13423 For MAXLEN only allow optimizing into non-_ocs function
13424 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13425 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13431 if (tree_int_cst_lt (size, maxlen))
13435 if (!init_target_chars ())
13438 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13439 or if format doesn't contain % chars or is "%s". */
13440 if (! integer_zerop (flag))
13442 fmt_str = c_getstr (fmt);
13443 if (fmt_str == NULL)
13445 if (strchr (fmt_str, target_percent) != NULL
13446 && strcmp (fmt_str, target_percent_s))
13450 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13452 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13453 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF]
/* Drop the flag and size arguments, keeping dest/len/fmt plus varargs.  */
13457 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13460 /* Builtins with folding operations that operate on "..." arguments
13461 need special handling; we need to store the arguments in a convenient
13462 data structure before attempting any folding. Fortunately there are
13463 only a few builtins that fall into this category. FNDECL is the
13464 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13465 result of the function call is ignored. */
13468 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13469 bool ignore ATTRIBUTE_UNUSED)
/* NOTE(review): the "static tree" return type, braces, the "switch
   (fcode)" line, break/default cases, the "if (ret)" guard before the
   NOP_EXPR wrap and "return ret;"/"return NULL_TREE;" are elided in
   this copy -- verify against upstream.  */
13471 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13472 tree ret = NULL_TREE;
13476 case BUILT_IN_SPRINTF_CHK:
13477 case BUILT_IN_VSPRINTF_CHK:
13478 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13481 case BUILT_IN_SNPRINTF_CHK:
13482 case BUILT_IN_VSNPRINTF_CHK:
13483 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
/* Wrap in a NOP_EXPR marked TREE_NO_WARNING so removing the original
   call does not trigger "statement without effect" diagnostics.  */
13490 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13491 TREE_NO_WARNING (ret) = 1;
13497 /* A wrapper function for builtin folding that prevents warnings for
13498 "statement without effect" and the like, caused by removing the
13499 call node earlier than the warning is generated. */
13502 fold_call_stmt (gimple stmt, bool ignore)
/* NOTE(review): the "tree" return type, braces, the "if (fndecl"
   opening of the truncated condition at 13508, the "return NULL_TREE;"
   after avoid_folding_inline_builtin, the else arm selecting the
   varargs path, and the final returns are elided in this copy.  */
13504 tree ret = NULL_TREE;
13505 tree fndecl = gimple_call_fndecl (stmt);
13506 location_t loc = gimple_location (stmt);
13508 && TREE_CODE (fndecl) == FUNCTION_DECL
13509 && DECL_BUILT_IN (fndecl)
13510 && !gimple_call_va_arg_pack_p (stmt))
13512 int nargs = gimple_call_num_args (stmt);
13514 if (avoid_folding_inline_builtin (fndecl))
13516 /* FIXME: Don't use a list in this interface. */
13517 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13519 tree arglist = NULL_TREE;
/* Build the arglist back-to-front so it ends up in call order.  */
13521 for (i = nargs - 1; i >= 0; i--)
13522 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13523 return targetm.fold_builtin (fndecl, arglist, ignore);
13527 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13529 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13531 for (i = 0; i < nargs; i++)
13532 args[i] = gimple_call_arg (stmt, i);
13533 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13536 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13539 /* Propagate location information from original call to
13540 expansion of builtin. Otherwise things like
13541 maybe_emit_chk_warning, that operate on the expansion
13542 of a builtin, will use the wrong location information. */
13543 if (gimple_has_location (stmt))
13545 tree realret = ret;
/* Look through the no-warning NOP_EXPR wrapper added by
   gimple_fold_builtin_varargs when attaching the location.  */
13546 if (TREE_CODE (ret) == NOP_EXPR)
13547 realret = TREE_OPERAND (ret, 0);
13548 if (CAN_HAVE_LOCATION_P (realret)
13549 && !EXPR_HAS_LOCATION (realret))
13550 SET_EXPR_LOCATION (realret, loc);
13560 /* Look up the function in built_in_decls that corresponds to DECL
13561 and set ASMSPEC as its user assembler name. DECL must be a
13562 function decl that declares a builtin. */
13565 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13568 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13569 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13572 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13573 set_user_assembler_name (builtin, asmspec);
13574 switch (DECL_FUNCTION_CODE (decl))
13576 case BUILT_IN_MEMCPY:
13577 init_block_move_fn (asmspec);
13578 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13580 case BUILT_IN_MEMSET:
13581 init_block_clear_fn (asmspec);
13582 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13584 case BUILT_IN_MEMMOVE:
13585 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13587 case BUILT_IN_MEMCMP:
13588 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13590 case BUILT_IN_ABORT:
13591 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);