1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
68 const char * built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_args_info (tree);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
228 /* Return true if NAME starts with __builtin_ or __sync_. */
231 is_builtin_name (const char *name)
233 if (strncmp (name, "__builtin_", 10) == 0)
235 if (strncmp (name, "__sync_", 7) == 0)
241 /* Return true if DECL is a function symbol representing a built-in. */
244 is_builtin_fn (tree decl)
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
255 called_as_built_in (tree node)
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
260 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
261 return is_builtin_name (name);
264 /* Return the alignment in bits of EXP, an object.
265 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
266 guessed alignment e.g. from type alignment. */
269 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
274 if (handled_component_p (exp))
276 HOST_WIDE_INT bitsize, bitpos;
278 enum machine_mode mode;
279 int unsignedp, volatilep;
281 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
282 &mode, &unsignedp, &volatilep, true);
284 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
289 if (TREE_CODE (offset) == PLUS_EXPR)
291 next_offset = TREE_OPERAND (offset, 0);
292 offset = TREE_OPERAND (offset, 1);
296 if (host_integerp (offset, 1))
298 /* Any overflow in calculating offset_bits won't change
301 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
304 inner = MIN (inner, (offset_bits & -offset_bits));
306 else if (TREE_CODE (offset) == MULT_EXPR
307 && host_integerp (TREE_OPERAND (offset, 1), 1))
309 /* Any overflow in calculating offset_factor won't change
311 unsigned offset_factor
312 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
316 inner = MIN (inner, (offset_factor & -offset_factor));
320 inner = MIN (inner, BITS_PER_UNIT);
323 offset = next_offset;
326 if (TREE_CODE (exp) == CONST_DECL)
327 exp = DECL_INITIAL (exp);
329 && TREE_CODE (exp) != LABEL_DECL)
330 align = MIN (inner, DECL_ALIGN (exp));
331 #ifdef CONSTANT_ALIGNMENT
332 else if (CONSTANT_CLASS_P (exp))
333 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
335 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
336 || TREE_CODE (exp) == INDIRECT_REF)
337 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
339 align = MIN (align, inner);
340 return MIN (align, max_align);
343 /* Returns true iff we can trust that alignment information has been
344 calculated properly. */
347 can_trust_pointer_alignment (void)
349 /* We rely on TER to compute accurate alignment information. */
350 return (optimize && flag_tree_ter);
353 /* Return the alignment in bits of EXP, a pointer valued expression.
354 But don't return more than MAX_ALIGN no matter what.
355 The alignment returned is, by default, the alignment of the thing that
356 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
358 Otherwise, look at the expression to see if we can do better, i.e., if the
359 expression is actually pointing at an object whose alignment is tighter. */
362 get_pointer_alignment (tree exp, unsigned int max_align)
364 unsigned int align, inner;
366 if (!can_trust_pointer_alignment ())
369 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
372 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
373 align = MIN (align, max_align);
377 switch (TREE_CODE (exp))
380 exp = TREE_OPERAND (exp, 0);
381 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
384 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
385 align = MIN (inner, max_align);
388 case POINTER_PLUS_EXPR:
389 /* If sum of pointer + int, restrict our maximum alignment to that
390 imposed by the integer. If not, we can't do any better than
392 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
395 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
396 & (max_align / BITS_PER_UNIT - 1))
400 exp = TREE_OPERAND (exp, 0);
404 /* See what we are pointing at and look at its alignment. */
405 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
413 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
414 way, because it could contain a zero byte in the middle.
415 TREE_STRING_LENGTH is the size of the character array, not the string.
417 ONLY_VALUE should be nonzero if the result is not going to be emitted
418 into the instruction stream and zero if it is going to be expanded.
419 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
420 is returned, otherwise NULL, since
421 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
422 evaluate the side-effects.
424 The value returned is of type `ssizetype'.
426 Unfortunately, string_constant can't access the values of const char
427 arrays with initializers, so neither can we do so here. */
430 c_strlen (tree src, int only_value)
433 HOST_WIDE_INT offset;
438 if (TREE_CODE (src) == COND_EXPR
439 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
443 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
444 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
445 if (tree_int_cst_equal (len1, len2))
449 if (TREE_CODE (src) == COMPOUND_EXPR
450 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
451 return c_strlen (TREE_OPERAND (src, 1), only_value);
453 src = string_constant (src, &offset_node);
457 max = TREE_STRING_LENGTH (src) - 1;
458 ptr = TREE_STRING_POINTER (src);
460 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
462 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
463 compute the offset to the following null if we don't know where to
464 start searching for it. */
467 for (i = 0; i < max; i++)
471 /* We don't know the starting offset, but we do know that the string
472 has no internal zero bytes. We can assume that the offset falls
473 within the bounds of the string; otherwise, the programmer deserves
474 what he gets. Subtract the offset from the length of the string,
475 and return that. This would perhaps not be valid if we were dealing
476 with named arrays in addition to literal string constants. */
478 return size_diffop_loc (input_location, size_int (max), offset_node);
481 /* We have a known offset into the string. Start searching there for
482 a null character if we can represent it as a single HOST_WIDE_INT. */
483 if (offset_node == 0)
485 else if (! host_integerp (offset_node, 0))
488 offset = tree_low_cst (offset_node, 0);
490 /* If the offset is known to be out of bounds, warn, and call strlen at
492 if (offset < 0 || offset > max)
494 /* Suppress multiple warnings for propagated constant strings. */
495 if (! TREE_NO_WARNING (src))
497 warning (0, "offset outside bounds of constant string");
498 TREE_NO_WARNING (src) = 1;
503 /* Use strlen to search for the first zero byte. Since any strings
504 constructed with build_string will have nulls appended, we win even
505 if we get handed something like (char[4])"abcd".
507 Since OFFSET is our starting index into the string, no further
508 calculation is needed. */
509 return ssize_int (strlen (ptr + offset));
512 /* Return a char pointer for a C string if it is a string constant
513 or sum of string constant and integer constant. */
520 src = string_constant (src, &offset_node);
524 if (offset_node == 0)
525 return TREE_STRING_POINTER (src);
526 else if (!host_integerp (offset_node, 1)
527 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
530 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
533 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
534 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
537 c_readstr (const char *str, enum machine_mode mode)
543 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
548 for (i = 0; i < GET_MODE_SIZE (mode); i++)
551 if (WORDS_BIG_ENDIAN)
552 j = GET_MODE_SIZE (mode) - i - 1;
553 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
554 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
555 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
557 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
560 ch = (unsigned char) str[i];
561 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
563 return immed_double_const (c[0], c[1], mode);
566 /* Cast a target constant CST to target CHAR and if that value fits into
567 host char type, return zero and put that value into variable pointed to by
571 target_char_cast (tree cst, char *p)
573 unsigned HOST_WIDE_INT val, hostval;
575 if (!host_integerp (cst, 1)
576 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
579 val = tree_low_cst (cst, 1);
580 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
581 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
584 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
585 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
594 /* Similar to save_expr, but assumes that arbitrary code is not executed
595 in between the multiple evaluations. In particular, we assume that a
596 non-addressable local variable will not be modified. */
599 builtin_save_expr (tree exp)
601 if (TREE_ADDRESSABLE (exp) == 0
602 && (TREE_CODE (exp) == PARM_DECL
603 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
606 return save_expr (exp);
609 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
610 times to get the address of either a higher stack frame, or a return
611 address located within it (depending on FNDECL_CODE). */
614 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
618 #ifdef INITIAL_FRAME_ADDRESS_RTX
619 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
623 /* For a zero count with __builtin_return_address, we don't care what
624 frame address we return, because target-specific definitions will
625 override us. Therefore frame pointer elimination is OK, and using
626 the soft frame pointer is OK.
628 For a nonzero count, or a zero count with __builtin_frame_address,
629 we require a stable offset from the current frame pointer to the
630 previous one, so we must use the hard frame pointer, and
631 we must disable frame pointer elimination. */
632 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
633 tem = frame_pointer_rtx;
636 tem = hard_frame_pointer_rtx;
638 /* Tell reload not to eliminate the frame pointer. */
639 crtl->accesses_prior_frames = 1;
643 /* Some machines need special handling before we can access
644 arbitrary frames. For example, on the SPARC, we must first flush
645 all register windows to the stack. */
646 #ifdef SETUP_FRAME_ADDRESSES
648 SETUP_FRAME_ADDRESSES ();
651 /* On the SPARC, the return address is not in the frame, it is in a
652 register. There is no way to access it off of the current frame
653 pointer, but it can be accessed off the previous frame pointer by
654 reading the value from the register window save area. */
655 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
656 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
660 /* Scan back COUNT frames to the specified frame. */
661 for (i = 0; i < count; i++)
663 /* Assume the dynamic chain pointer is in the word that the
664 frame address points to, unless otherwise specified. */
665 #ifdef DYNAMIC_CHAIN_ADDRESS
666 tem = DYNAMIC_CHAIN_ADDRESS (tem);
668 tem = memory_address (Pmode, tem);
669 tem = gen_frame_mem (Pmode, tem);
670 tem = copy_to_reg (tem);
673 /* For __builtin_frame_address, return what we've got. But, on
674 the SPARC for example, we may have to add a bias. */
675 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
676 #ifdef FRAME_ADDR_RTX
677 return FRAME_ADDR_RTX (tem);
682 /* For __builtin_return_address, get the return address from that frame. */
683 #ifdef RETURN_ADDR_RTX
684 tem = RETURN_ADDR_RTX (count, tem);
686 tem = memory_address (Pmode,
687 plus_constant (tem, GET_MODE_SIZE (Pmode)));
688 tem = gen_frame_mem (Pmode, tem);
693 /* Alias set used for setjmp buffer. */
694 static alias_set_type setjmp_alias_set = -1;
696 /* Construct the leading half of a __builtin_setjmp call. Control will
697 return to RECEIVER_LABEL. This is also called directly by the SJLJ
698 exception handling code. */
701 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
703 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
707 if (setjmp_alias_set == -1)
708 setjmp_alias_set = new_alias_set ();
710 buf_addr = convert_memory_address (Pmode, buf_addr);
712 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
714 /* We store the frame pointer and the address of receiver_label in
715 the buffer and use the rest of it for the stack save area, which
716 is machine-dependent. */
718 mem = gen_rtx_MEM (Pmode, buf_addr);
719 set_mem_alias_set (mem, setjmp_alias_set);
720 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
722 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
723 set_mem_alias_set (mem, setjmp_alias_set);
725 emit_move_insn (validize_mem (mem),
726 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
728 stack_save = gen_rtx_MEM (sa_mode,
729 plus_constant (buf_addr,
730 2 * GET_MODE_SIZE (Pmode)));
731 set_mem_alias_set (stack_save, setjmp_alias_set);
732 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
734 /* If there is further processing to do, do it. */
735 #ifdef HAVE_builtin_setjmp_setup
736 if (HAVE_builtin_setjmp_setup)
737 emit_insn (gen_builtin_setjmp_setup (buf_addr));
740 /* Tell optimize_save_area_alloca that extra work is going to
741 need to go on during alloca. */
742 cfun->calls_setjmp = 1;
744 /* We have a nonlocal label. */
745 cfun->has_nonlocal_label = 1;
748 /* Construct the trailing part of a __builtin_setjmp call. This is
749 also called directly by the SJLJ exception handling code. */
752 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
756 /* Clobber the FP when we get here, so we have to make sure it's
757 marked as used by this function. */
758 emit_use (hard_frame_pointer_rtx);
760 /* Mark the static chain as clobbered here so life information
761 doesn't get messed up for it. */
762 chain = targetm.calls.static_chain (current_function_decl, true);
763 if (chain && REG_P (chain))
764 emit_clobber (chain);
766 /* Now put in the code to restore the frame pointer, and argument
767 pointer, if needed. */
768 #ifdef HAVE_nonlocal_goto
769 if (! HAVE_nonlocal_goto)
772 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
773 /* This might change the hard frame pointer in ways that aren't
774 apparent to early optimization passes, so force a clobber. */
775 emit_clobber (hard_frame_pointer_rtx);
778 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
779 if (fixed_regs[ARG_POINTER_REGNUM])
781 #ifdef ELIMINABLE_REGS
783 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
785 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
786 if (elim_regs[i].from == ARG_POINTER_REGNUM
787 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
790 if (i == ARRAY_SIZE (elim_regs))
793 /* Now restore our arg pointer from the address at which it
794 was saved in our stack frame. */
795 emit_move_insn (crtl->args.internal_arg_pointer,
796 copy_to_reg (get_arg_pointer_save_area ()));
801 #ifdef HAVE_builtin_setjmp_receiver
802 if (HAVE_builtin_setjmp_receiver)
803 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
806 #ifdef HAVE_nonlocal_goto_receiver
807 if (HAVE_nonlocal_goto_receiver)
808 emit_insn (gen_nonlocal_goto_receiver ());
813 /* We must not allow the code we just generated to be reordered by
814 scheduling. Specifically, the update of the frame pointer must
815 happen immediately, not later. */
816 emit_insn (gen_blockage ());
819 /* __builtin_longjmp is passed a pointer to an array of five words (not
820 all will be used on all machines). It operates similarly to the C
821 library function of the same name, but is more efficient. Much of
822 the code below is copied from the handling of non-local gotos. */
825 expand_builtin_longjmp (rtx buf_addr, rtx value)
827 rtx fp, lab, stack, insn, last;
828 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
830 /* DRAP is needed for stack realign if longjmp is expanded to current
832 if (SUPPORTS_STACK_ALIGNMENT)
833 crtl->need_drap = true;
835 if (setjmp_alias_set == -1)
836 setjmp_alias_set = new_alias_set ();
838 buf_addr = convert_memory_address (Pmode, buf_addr);
840 buf_addr = force_reg (Pmode, buf_addr);
842 /* We require that the user must pass a second argument of 1, because
843 that is what builtin_setjmp will return. */
844 gcc_assert (value == const1_rtx);
846 last = get_last_insn ();
847 #ifdef HAVE_builtin_longjmp
848 if (HAVE_builtin_longjmp)
849 emit_insn (gen_builtin_longjmp (buf_addr));
853 fp = gen_rtx_MEM (Pmode, buf_addr);
854 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
855 GET_MODE_SIZE (Pmode)));
857 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
858 2 * GET_MODE_SIZE (Pmode)));
859 set_mem_alias_set (fp, setjmp_alias_set);
860 set_mem_alias_set (lab, setjmp_alias_set);
861 set_mem_alias_set (stack, setjmp_alias_set);
863 /* Pick up FP, label, and SP from the block and jump. This code is
864 from expand_goto in stmt.c; see there for detailed comments. */
865 #ifdef HAVE_nonlocal_goto
866 if (HAVE_nonlocal_goto)
867 /* We have to pass a value to the nonlocal_goto pattern that will
868 get copied into the static_chain pointer, but it does not matter
869 what that value is, because builtin_setjmp does not use it. */
870 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
874 lab = copy_to_reg (lab);
876 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
877 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
879 emit_move_insn (hard_frame_pointer_rtx, fp);
880 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
882 emit_use (hard_frame_pointer_rtx);
883 emit_use (stack_pointer_rtx);
884 emit_indirect_jump (lab);
888 /* Search backwards and mark the jump insn as a non-local goto.
889 Note that this precludes the use of __builtin_longjmp to a
890 __builtin_setjmp target in the same function. However, we've
891 already cautioned the user that these functions are for
892 internal exception handling use only. */
893 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
895 gcc_assert (insn != last);
899 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
902 else if (CALL_P (insn))
907 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
908 and the address of the save area. */
911 expand_builtin_nonlocal_goto (tree exp)
913 tree t_label, t_save_area;
914 rtx r_label, r_save_area, r_fp, r_sp, insn;
916 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
919 t_label = CALL_EXPR_ARG (exp, 0);
920 t_save_area = CALL_EXPR_ARG (exp, 1);
922 r_label = expand_normal (t_label);
923 r_label = convert_memory_address (Pmode, r_label);
924 r_save_area = expand_normal (t_save_area);
925 r_save_area = convert_memory_address (Pmode, r_save_area);
926 /* Copy the address of the save location to a register just in case it was based
927 on the frame pointer. */
928 r_save_area = copy_to_reg (r_save_area);
929 r_fp = gen_rtx_MEM (Pmode, r_save_area);
930 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
931 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
933 crtl->has_nonlocal_goto = 1;
935 #ifdef HAVE_nonlocal_goto
936 /* ??? We no longer need to pass the static chain value, afaik. */
937 if (HAVE_nonlocal_goto)
938 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
942 r_label = copy_to_reg (r_label);
944 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
945 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
947 /* Restore frame pointer for containing function.
948 This sets the actual hard register used for the frame pointer
949 to the location of the function's incoming static chain info.
950 The non-local goto handler will then adjust it to contain the
951 proper value and reload the argument pointer, if needed. */
952 emit_move_insn (hard_frame_pointer_rtx, r_fp);
953 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
955 /* USE of hard_frame_pointer_rtx added for consistency;
956 not clear if really needed. */
957 emit_use (hard_frame_pointer_rtx);
958 emit_use (stack_pointer_rtx);
960 /* If the architecture is using a GP register, we must
961 conservatively assume that the target function makes use of it.
962 The prologue of functions with nonlocal gotos must therefore
963 initialize the GP register to the appropriate value, and we
964 must then make sure that this value is live at the point
965 of the jump. (Note that this doesn't necessarily apply
966 to targets with a nonlocal_goto pattern; they are free
967 to implement it in their own way. Note also that this is
968 a no-op if the GP register is a global invariant.) */
969 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
970 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
971 emit_use (pic_offset_table_rtx);
973 emit_indirect_jump (r_label);
976 /* Search backwards to the jump insn and mark it as a
978 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
982 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
985 else if (CALL_P (insn))
992 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
993 (not all will be used on all machines) that was passed to __builtin_setjmp.
994 It updates the stack pointer in that block to correspond to the current
998 expand_builtin_update_setjmp_buf (rtx buf_addr)
1000 enum machine_mode sa_mode = Pmode;
1004 #ifdef HAVE_save_stack_nonlocal
1005 if (HAVE_save_stack_nonlocal)
1006 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1008 #ifdef STACK_SAVEAREA_MODE
1009 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1013 = gen_rtx_MEM (sa_mode,
1016 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1020 emit_insn (gen_setjmp ());
1023 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1026 /* Expand a call to __builtin_prefetch. For a target that does not support
1027 data prefetch, evaluate the memory address argument in case it has side
1031 expand_builtin_prefetch (tree exp)
1033 tree arg0, arg1, arg2;
1037 if (!validate_arglist (exp, POINTER_TYPE, 0))
1040 arg0 = CALL_EXPR_ARG (exp, 0);
1042 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1043 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1045 nargs = call_expr_nargs (exp);
1047 arg1 = CALL_EXPR_ARG (exp, 1);
1049 arg1 = integer_zero_node;
1051 arg2 = CALL_EXPR_ARG (exp, 2);
1053 arg2 = build_int_cst (NULL_TREE, 3);
1055 /* Argument 0 is an address. */
1056 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1058 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1059 if (TREE_CODE (arg1) != INTEGER_CST)
1061 error ("second argument to %<__builtin_prefetch%> must be a constant");
1062 arg1 = integer_zero_node;
1064 op1 = expand_normal (arg1);
1065 /* Argument 1 must be either zero or one. */
1066 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1068 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1073 /* Argument 2 (locality) must be a compile-time constant int. */
1074 if (TREE_CODE (arg2) != INTEGER_CST)
1076 error ("third argument to %<__builtin_prefetch%> must be a constant");
1077 arg2 = integer_zero_node;
1079 op2 = expand_normal (arg2);
1080 /* Argument 2 must be 0, 1, 2, or 3. */
1081 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1083 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1087 #ifdef HAVE_prefetch
1090 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1092 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1093 || (GET_MODE (op0) != Pmode))
1095 op0 = convert_memory_address (Pmode, op0);
1096 op0 = force_reg (Pmode, op0);
1098 emit_insn (gen_prefetch (op0, op1, op2));
1102 /* Don't do anything with direct references to volatile memory, but
1103 generate code to handle other side effects. */
1104 if (!MEM_P (op0) && side_effects_p (op0))
1108 /* Get a MEM rtx for expression EXP which is the address of an operand
1109 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1110 the maximum length of the block of memory that might be accessed or
1114 get_memory_rtx (tree exp, tree len)
1116 tree orig_exp = exp;
1120 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1121 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1122 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1123 exp = TREE_OPERAND (exp, 0);
1125 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1126 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1128 /* Get an expression we can use to find the attributes to assign to MEM.
1129 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1130 we can. First remove any nops. */
1131 while (CONVERT_EXPR_P (exp)
1132 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1133 exp = TREE_OPERAND (exp, 0);
1136 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1137 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1138 && host_integerp (TREE_OPERAND (exp, 1), 0)
1139 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1140 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1141 else if (TREE_CODE (exp) == ADDR_EXPR)
1142 exp = TREE_OPERAND (exp, 0);
1143 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1144 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1148 /* Honor attributes derived from exp, except for the alias set
1149 (as builtin stringops may alias with anything) and the size
1150 (as stringops may access multiple array elements). */
1153 set_mem_attributes (mem, exp, 0);
1156 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1158 /* Allow the string and memory builtins to overflow from one
1159 field into another, see http://gcc.gnu.org/PR23561.
1160 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1161 memory accessed by the string or memory builtin will fit
1162 within the field. */
1163 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1165 tree mem_expr = MEM_EXPR (mem);
1166 HOST_WIDE_INT offset = -1, length = -1;
1169 while (TREE_CODE (inner) == ARRAY_REF
1170 || CONVERT_EXPR_P (inner)
1171 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1172 || TREE_CODE (inner) == SAVE_EXPR)
1173 inner = TREE_OPERAND (inner, 0);
1175 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1177 if (MEM_OFFSET (mem)
1178 && CONST_INT_P (MEM_OFFSET (mem)))
1179 offset = INTVAL (MEM_OFFSET (mem));
1181 if (offset >= 0 && len && host_integerp (len, 0))
1182 length = tree_low_cst (len, 0);
1184 while (TREE_CODE (inner) == COMPONENT_REF)
1186 tree field = TREE_OPERAND (inner, 1);
1187 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1188 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1190 /* Bitfields are generally not byte-addressable. */
1191 gcc_assert (!DECL_BIT_FIELD (field)
1192 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1193 % BITS_PER_UNIT) == 0
1194 && host_integerp (DECL_SIZE (field), 0)
1195 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1196 % BITS_PER_UNIT) == 0));
1198 /* If we can prove that the memory starting at XEXP (mem, 0) and
1199 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1200 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1201 fields without DECL_SIZE_UNIT like flexible array members. */
1203 && DECL_SIZE_UNIT (field)
1204 && host_integerp (DECL_SIZE_UNIT (field), 0))
1207 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1210 && offset + length <= size)
1215 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1216 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1217 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1225 mem_expr = TREE_OPERAND (mem_expr, 0);
1226 inner = TREE_OPERAND (inner, 0);
1229 if (mem_expr == NULL)
1231 if (mem_expr != MEM_EXPR (mem))
1233 set_mem_expr (mem, mem_expr);
1234 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1237 set_mem_alias_set (mem, 0);
1238 set_mem_size (mem, NULL_RTX);
1244 /* Built-in functions to perform an untyped call and return. */
1246 /* For each register that may be used for calling a function, this
1247 gives a mode used to copy the register's value. VOIDmode indicates
1248 the register is not used for calling a function. If the machine
1249 has register windows, this gives only the outbound registers.
1250 INCOMING_REGNO gives the corresponding inbound register. */
1251 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1253 /* For each register that may be used for returning values, this gives
1254 a mode used to copy the register's value. VOIDmode indicates the
1255 register is not used for returning values. If the machine has
1256 register windows, this gives only the outbound registers.
1257 INCOMING_REGNO gives the corresponding inbound register. */
1258 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1260 /* Return the size required for the block returned by __builtin_apply_args,
1261 and initialize apply_args_mode. */
1264 apply_args_size (void)
1266 static int size = -1;
1269 enum machine_mode mode;
1271 /* The values computed by this function never change. */
1274 /* The first value is the incoming arg-pointer. */
1275 size = GET_MODE_SIZE (Pmode);
1277 /* The second value is the structure value address unless this is
1278 passed as an "invisible" first argument. */
1279 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1280 size += GET_MODE_SIZE (Pmode);
1282 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1283 if (FUNCTION_ARG_REGNO_P (regno))
1285 mode = reg_raw_mode[regno];
1287 gcc_assert (mode != VOIDmode);
1289 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1290 if (size % align != 0)
1291 size = CEIL (size, align) * align;
1292 size += GET_MODE_SIZE (mode);
1293 apply_args_mode[regno] = mode;
1297 apply_args_mode[regno] = VOIDmode;
1303 /* Return the size required for the block returned by __builtin_apply,
1304 and initialize apply_result_mode. */
1307 apply_result_size (void)
1309 static int size = -1;
1311 enum machine_mode mode;
1313 /* The values computed by this function never change. */
1318 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1319 if (FUNCTION_VALUE_REGNO_P (regno))
1321 mode = reg_raw_mode[regno];
1323 gcc_assert (mode != VOIDmode);
1325 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1326 if (size % align != 0)
1327 size = CEIL (size, align) * align;
1328 size += GET_MODE_SIZE (mode);
1329 apply_result_mode[regno] = mode;
1332 apply_result_mode[regno] = VOIDmode;
1334 /* Allow targets that use untyped_call and untyped_return to override
1335 the size so that machine-specific information can be stored here. */
1336 #ifdef APPLY_RESULT_SIZE
1337 size = APPLY_RESULT_SIZE;
1343 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1344 /* Create a vector describing the result block RESULT. If SAVEP is true,
1345 the result block is used to save the values; otherwise it is used to
1346 restore the values. */
1349 result_vector (int savep, rtx result)
1351 int regno, size, align, nelts;
1352 enum machine_mode mode;
1354 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1357 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1358 if ((mode = apply_result_mode[regno]) != VOIDmode)
1360 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1361 if (size % align != 0)
1362 size = CEIL (size, align) * align;
1363 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1364 mem = adjust_address (result, mode, size);
1365 savevec[nelts++] = (savep
1366 ? gen_rtx_SET (VOIDmode, mem, reg)
1367 : gen_rtx_SET (VOIDmode, reg, mem));
1368 size += GET_MODE_SIZE (mode);
1370 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1372 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1374 /* Save the state required to perform an untyped call with the same
1375 arguments as were passed to the current function. */
1378 expand_builtin_apply_args_1 (void)
1381 int size, align, regno;
1382 enum machine_mode mode;
1383 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1385 /* Create a block where the arg-pointer, structure value address,
1386 and argument registers can be saved. */
1387 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1389 /* Walk past the arg-pointer and structure value address. */
1390 size = GET_MODE_SIZE (Pmode);
1391 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1392 size += GET_MODE_SIZE (Pmode);
1394 /* Save each register used in calling a function to the block. */
1395 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1396 if ((mode = apply_args_mode[regno]) != VOIDmode)
1398 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1399 if (size % align != 0)
1400 size = CEIL (size, align) * align;
1402 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1404 emit_move_insn (adjust_address (registers, mode, size), tem);
1405 size += GET_MODE_SIZE (mode);
1408 /* Save the arg pointer to the block. */
1409 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1410 #ifdef STACK_GROWS_DOWNWARD
1411 /* We need the pointer as the caller actually passed them to us, not
1412 as we might have pretended they were passed. Make sure it's a valid
1413 operand, as emit_move_insn isn't expected to handle a PLUS. */
1415 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1418 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1420 size = GET_MODE_SIZE (Pmode);
1422 /* Save the structure value address unless this is passed as an
1423 "invisible" first argument. */
1424 if (struct_incoming_value)
1426 emit_move_insn (adjust_address (registers, Pmode, size),
1427 copy_to_reg (struct_incoming_value));
1428 size += GET_MODE_SIZE (Pmode);
1431 /* Return the address of the block. */
1432 return copy_addr_to_reg (XEXP (registers, 0));
1435 /* __builtin_apply_args returns block of memory allocated on
1436 the stack into which is stored the arg pointer, structure
1437 value address, static chain, and all the registers that might
1438 possibly be used in performing a function call. The code is
1439 moved to the start of the function so the incoming values are
1443 expand_builtin_apply_args (void)
1445 /* Don't do __builtin_apply_args more than once in a function.
1446 Save the result of the first call and reuse it. */
1447 if (apply_args_value != 0)
1448 return apply_args_value;
1450 /* When this function is called, it means that registers must be
1451 saved on entry to this function. So we migrate the
1452 call to the first insn of this function. */
1457 temp = expand_builtin_apply_args_1 ();
1461 apply_args_value = temp;
1463 /* Put the insns after the NOTE that starts the function.
1464 If this is inside a start_sequence, make the outer-level insn
1465 chain current, so the code is placed at the start of the
1466 function. If internal_arg_pointer is a non-virtual pseudo,
1467 it needs to be placed after the function that initializes
1469 push_topmost_sequence ();
1470 if (REG_P (crtl->args.internal_arg_pointer)
1471 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1472 emit_insn_before (seq, parm_birth_insn);
1474 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1475 pop_topmost_sequence ();
1480 /* Perform an untyped call and save the state required to perform an
1481 untyped return of whatever value was returned by the given function. */
1484 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1486 int size, align, regno;
1487 enum machine_mode mode;
1488 rtx incoming_args, result, reg, dest, src, call_insn;
1489 rtx old_stack_level = 0;
1490 rtx call_fusage = 0;
1491 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1493 arguments = convert_memory_address (Pmode, arguments);
1495 /* Create a block where the return registers can be saved. */
1496 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1498 /* Fetch the arg pointer from the ARGUMENTS block. */
1499 incoming_args = gen_reg_rtx (Pmode);
1500 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1501 #ifndef STACK_GROWS_DOWNWARD
1502 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1503 incoming_args, 0, OPTAB_LIB_WIDEN);
1506 /* Push a new argument block and copy the arguments. Do not allow
1507 the (potential) memcpy call below to interfere with our stack
1509 do_pending_stack_adjust ();
1512 /* Save the stack with nonlocal if available. */
1513 #ifdef HAVE_save_stack_nonlocal
1514 if (HAVE_save_stack_nonlocal)
1515 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1518 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1520 /* Allocate a block of memory onto the stack and copy the memory
1521 arguments to the outgoing arguments address. */
1522 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1524 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1525 may have already set current_function_calls_alloca to true.
1526 current_function_calls_alloca won't be set if argsize is zero,
1527 so we have to guarantee need_drap is true here. */
1528 if (SUPPORTS_STACK_ALIGNMENT)
1529 crtl->need_drap = true;
1531 dest = virtual_outgoing_args_rtx;
1532 #ifndef STACK_GROWS_DOWNWARD
1533 if (CONST_INT_P (argsize))
1534 dest = plus_constant (dest, -INTVAL (argsize));
1536 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1538 dest = gen_rtx_MEM (BLKmode, dest);
1539 set_mem_align (dest, PARM_BOUNDARY);
1540 src = gen_rtx_MEM (BLKmode, incoming_args);
1541 set_mem_align (src, PARM_BOUNDARY);
1542 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1544 /* Refer to the argument block. */
1546 arguments = gen_rtx_MEM (BLKmode, arguments);
1547 set_mem_align (arguments, PARM_BOUNDARY);
1549 /* Walk past the arg-pointer and structure value address. */
1550 size = GET_MODE_SIZE (Pmode);
1552 size += GET_MODE_SIZE (Pmode);
1554 /* Restore each of the registers previously saved. Make USE insns
1555 for each of these registers for use in making the call. */
1556 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1557 if ((mode = apply_args_mode[regno]) != VOIDmode)
1559 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1560 if (size % align != 0)
1561 size = CEIL (size, align) * align;
1562 reg = gen_rtx_REG (mode, regno);
1563 emit_move_insn (reg, adjust_address (arguments, mode, size));
1564 use_reg (&call_fusage, reg);
1565 size += GET_MODE_SIZE (mode);
1568 /* Restore the structure value address unless this is passed as an
1569 "invisible" first argument. */
1570 size = GET_MODE_SIZE (Pmode);
1573 rtx value = gen_reg_rtx (Pmode);
1574 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1575 emit_move_insn (struct_value, value);
1576 if (REG_P (struct_value))
1577 use_reg (&call_fusage, struct_value);
1578 size += GET_MODE_SIZE (Pmode);
1581 /* All arguments and registers used for the call are set up by now! */
1582 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1584 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1585 and we don't want to load it into a register as an optimization,
1586 because prepare_call_address already did it if it should be done. */
1587 if (GET_CODE (function) != SYMBOL_REF)
1588 function = memory_address (FUNCTION_MODE, function);
1590 /* Generate the actual call instruction and save the return value. */
1591 #ifdef HAVE_untyped_call
1592 if (HAVE_untyped_call)
1593 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1594 result, result_vector (1, result)));
1597 #ifdef HAVE_call_value
1598 if (HAVE_call_value)
1602 /* Locate the unique return register. It is not possible to
1603 express a call that sets more than one return register using
1604 call_value; use untyped_call for that. In fact, untyped_call
1605 only needs to save the return registers in the given block. */
1606 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1607 if ((mode = apply_result_mode[regno]) != VOIDmode)
1609 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1611 valreg = gen_rtx_REG (mode, regno);
1614 emit_call_insn (GEN_CALL_VALUE (valreg,
1615 gen_rtx_MEM (FUNCTION_MODE, function),
1616 const0_rtx, NULL_RTX, const0_rtx));
1618 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1624 /* Find the CALL insn we just emitted, and attach the register usage
1626 call_insn = last_call_insn ();
1627 add_function_usage_to (call_insn, call_fusage);
1629 /* Restore the stack. */
1630 #ifdef HAVE_save_stack_nonlocal
1631 if (HAVE_save_stack_nonlocal)
1632 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1635 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1639 /* Return the address of the result block. */
1640 result = copy_addr_to_reg (XEXP (result, 0));
1641 return convert_memory_address (ptr_mode, result);
1644 /* Perform an untyped return. */
1647 expand_builtin_return (rtx result)
1649 int size, align, regno;
1650 enum machine_mode mode;
1652 rtx call_fusage = 0;
1654 result = convert_memory_address (Pmode, result);
1656 apply_result_size ();
1657 result = gen_rtx_MEM (BLKmode, result);
1659 #ifdef HAVE_untyped_return
1660 if (HAVE_untyped_return)
1662 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1668 /* Restore the return value and note that each value is used. */
1670 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1671 if ((mode = apply_result_mode[regno]) != VOIDmode)
1673 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1674 if (size % align != 0)
1675 size = CEIL (size, align) * align;
1676 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1677 emit_move_insn (reg, adjust_address (result, mode, size));
1679 push_to_sequence (call_fusage);
1681 call_fusage = get_insns ();
1683 size += GET_MODE_SIZE (mode);
1686 /* Put the USE insns before the return. */
1687 emit_insn (call_fusage);
1689 /* Return whatever values was restored by jumping directly to the end
1691 expand_naked_return ();
1694 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1696 static enum type_class
1697 type_to_class (tree type)
1699 switch (TREE_CODE (type))
1701 case VOID_TYPE: return void_type_class;
1702 case INTEGER_TYPE: return integer_type_class;
1703 case ENUMERAL_TYPE: return enumeral_type_class;
1704 case BOOLEAN_TYPE: return boolean_type_class;
1705 case POINTER_TYPE: return pointer_type_class;
1706 case REFERENCE_TYPE: return reference_type_class;
1707 case OFFSET_TYPE: return offset_type_class;
1708 case REAL_TYPE: return real_type_class;
1709 case COMPLEX_TYPE: return complex_type_class;
1710 case FUNCTION_TYPE: return function_type_class;
1711 case METHOD_TYPE: return method_type_class;
1712 case RECORD_TYPE: return record_type_class;
1714 case QUAL_UNION_TYPE: return union_type_class;
1715 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1716 ? string_type_class : array_type_class);
1717 case LANG_TYPE: return lang_type_class;
1718 default: return no_type_class;
1722 /* Expand a call EXP to __builtin_classify_type. */
1725 expand_builtin_classify_type (tree exp)
1727 if (call_expr_nargs (exp))
1728 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1729 return GEN_INT (no_type_class);
1732 /* This helper macro, meant to be used in mathfn_built_in below,
1733 determines which among a set of three builtin math functions is
1734 appropriate for a given type mode. The `F' and `L' cases are
1735 automatically generated from the `double' case. */
1736 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1737 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1738 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1739 fcodel = BUILT_IN_MATHFN##L ; break;
1740 /* Similar to above, but appends _R after any F/L suffix. */
1741 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1742 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1743 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1744 fcodel = BUILT_IN_MATHFN##L_R ; break;
1746 /* Return mathematic function equivalent to FN but operating directly
1747 on TYPE, if available. If IMPLICIT is true find the function in
1748 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1749 can't do the conversion, return zero. */
1752 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1754 tree const *const fn_arr
1755 = implicit ? implicit_built_in_decls : built_in_decls;
1756 enum built_in_function fcode, fcodef, fcodel;
1760 CASE_MATHFN (BUILT_IN_ACOS)
1761 CASE_MATHFN (BUILT_IN_ACOSH)
1762 CASE_MATHFN (BUILT_IN_ASIN)
1763 CASE_MATHFN (BUILT_IN_ASINH)
1764 CASE_MATHFN (BUILT_IN_ATAN)
1765 CASE_MATHFN (BUILT_IN_ATAN2)
1766 CASE_MATHFN (BUILT_IN_ATANH)
1767 CASE_MATHFN (BUILT_IN_CBRT)
1768 CASE_MATHFN (BUILT_IN_CEIL)
1769 CASE_MATHFN (BUILT_IN_CEXPI)
1770 CASE_MATHFN (BUILT_IN_COPYSIGN)
1771 CASE_MATHFN (BUILT_IN_COS)
1772 CASE_MATHFN (BUILT_IN_COSH)
1773 CASE_MATHFN (BUILT_IN_DREM)
1774 CASE_MATHFN (BUILT_IN_ERF)
1775 CASE_MATHFN (BUILT_IN_ERFC)
1776 CASE_MATHFN (BUILT_IN_EXP)
1777 CASE_MATHFN (BUILT_IN_EXP10)
1778 CASE_MATHFN (BUILT_IN_EXP2)
1779 CASE_MATHFN (BUILT_IN_EXPM1)
1780 CASE_MATHFN (BUILT_IN_FABS)
1781 CASE_MATHFN (BUILT_IN_FDIM)
1782 CASE_MATHFN (BUILT_IN_FLOOR)
1783 CASE_MATHFN (BUILT_IN_FMA)
1784 CASE_MATHFN (BUILT_IN_FMAX)
1785 CASE_MATHFN (BUILT_IN_FMIN)
1786 CASE_MATHFN (BUILT_IN_FMOD)
1787 CASE_MATHFN (BUILT_IN_FREXP)
1788 CASE_MATHFN (BUILT_IN_GAMMA)
1789 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1790 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1791 CASE_MATHFN (BUILT_IN_HYPOT)
1792 CASE_MATHFN (BUILT_IN_ILOGB)
1793 CASE_MATHFN (BUILT_IN_INF)
1794 CASE_MATHFN (BUILT_IN_ISINF)
1795 CASE_MATHFN (BUILT_IN_J0)
1796 CASE_MATHFN (BUILT_IN_J1)
1797 CASE_MATHFN (BUILT_IN_JN)
1798 CASE_MATHFN (BUILT_IN_LCEIL)
1799 CASE_MATHFN (BUILT_IN_LDEXP)
1800 CASE_MATHFN (BUILT_IN_LFLOOR)
1801 CASE_MATHFN (BUILT_IN_LGAMMA)
1802 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1803 CASE_MATHFN (BUILT_IN_LLCEIL)
1804 CASE_MATHFN (BUILT_IN_LLFLOOR)
1805 CASE_MATHFN (BUILT_IN_LLRINT)
1806 CASE_MATHFN (BUILT_IN_LLROUND)
1807 CASE_MATHFN (BUILT_IN_LOG)
1808 CASE_MATHFN (BUILT_IN_LOG10)
1809 CASE_MATHFN (BUILT_IN_LOG1P)
1810 CASE_MATHFN (BUILT_IN_LOG2)
1811 CASE_MATHFN (BUILT_IN_LOGB)
1812 CASE_MATHFN (BUILT_IN_LRINT)
1813 CASE_MATHFN (BUILT_IN_LROUND)
1814 CASE_MATHFN (BUILT_IN_MODF)
1815 CASE_MATHFN (BUILT_IN_NAN)
1816 CASE_MATHFN (BUILT_IN_NANS)
1817 CASE_MATHFN (BUILT_IN_NEARBYINT)
1818 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1819 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1820 CASE_MATHFN (BUILT_IN_POW)
1821 CASE_MATHFN (BUILT_IN_POWI)
1822 CASE_MATHFN (BUILT_IN_POW10)
1823 CASE_MATHFN (BUILT_IN_REMAINDER)
1824 CASE_MATHFN (BUILT_IN_REMQUO)
1825 CASE_MATHFN (BUILT_IN_RINT)
1826 CASE_MATHFN (BUILT_IN_ROUND)
1827 CASE_MATHFN (BUILT_IN_SCALB)
1828 CASE_MATHFN (BUILT_IN_SCALBLN)
1829 CASE_MATHFN (BUILT_IN_SCALBN)
1830 CASE_MATHFN (BUILT_IN_SIGNBIT)
1831 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1832 CASE_MATHFN (BUILT_IN_SIN)
1833 CASE_MATHFN (BUILT_IN_SINCOS)
1834 CASE_MATHFN (BUILT_IN_SINH)
1835 CASE_MATHFN (BUILT_IN_SQRT)
1836 CASE_MATHFN (BUILT_IN_TAN)
1837 CASE_MATHFN (BUILT_IN_TANH)
1838 CASE_MATHFN (BUILT_IN_TGAMMA)
1839 CASE_MATHFN (BUILT_IN_TRUNC)
1840 CASE_MATHFN (BUILT_IN_Y0)
1841 CASE_MATHFN (BUILT_IN_Y1)
1842 CASE_MATHFN (BUILT_IN_YN)
1848 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1849 return fn_arr[fcode];
1850 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1851 return fn_arr[fcodef];
1852 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1853 return fn_arr[fcodel];
1858 /* Like mathfn_built_in_1(), but always use the implicit array. */
1861 mathfn_built_in (tree type, enum built_in_function fn)
1863 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1866 /* If errno must be maintained, expand the RTL to check if the result,
1867 TARGET, of a built-in function call, EXP, is NaN, and if so set
1871 expand_errno_check (tree exp, rtx target)
1873 rtx lab = gen_label_rtx ();
1875 /* Test the result; if it is NaN, set errno=EDOM because
1876 the argument was not in the domain. */
1877 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1878 NULL_RTX, NULL_RTX, lab);
1881 /* If this built-in doesn't throw an exception, set errno directly. */
1882 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1884 #ifdef GEN_ERRNO_RTX
1885 rtx errno_rtx = GEN_ERRNO_RTX;
1888 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1890 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1896 /* Make sure the library call isn't expanded as a tail call. */
1897 CALL_EXPR_TAILCALL (exp) = 0;
1899 /* We can't set errno=EDOM directly; let the library call do it.
1900 Pop the arguments right away in case the call gets deleted. */
1902 expand_call (exp, target, 0);
1907 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1908 Return NULL_RTX if a normal call should be emitted rather than expanding
1909 the function in-line. EXP is the expression that is a call to the builtin
1910 function; if convenient, the result should be placed in TARGET.
1911 SUBTARGET may be used as the target for computing one of EXP's operands. */
1914 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1916 optab builtin_optab;
1918 tree fndecl = get_callee_fndecl (exp);
1919 enum machine_mode mode;
1920 bool errno_set = false;
1923 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1926 arg = CALL_EXPR_ARG (exp, 0);
1928 switch (DECL_FUNCTION_CODE (fndecl))
1930 CASE_FLT_FN (BUILT_IN_SQRT):
1931 errno_set = ! tree_expr_nonnegative_p (arg);
1932 builtin_optab = sqrt_optab;
1934 CASE_FLT_FN (BUILT_IN_EXP):
1935 errno_set = true; builtin_optab = exp_optab; break;
1936 CASE_FLT_FN (BUILT_IN_EXP10):
1937 CASE_FLT_FN (BUILT_IN_POW10):
1938 errno_set = true; builtin_optab = exp10_optab; break;
1939 CASE_FLT_FN (BUILT_IN_EXP2):
1940 errno_set = true; builtin_optab = exp2_optab; break;
1941 CASE_FLT_FN (BUILT_IN_EXPM1):
1942 errno_set = true; builtin_optab = expm1_optab; break;
1943 CASE_FLT_FN (BUILT_IN_LOGB):
1944 errno_set = true; builtin_optab = logb_optab; break;
1945 CASE_FLT_FN (BUILT_IN_LOG):
1946 errno_set = true; builtin_optab = log_optab; break;
1947 CASE_FLT_FN (BUILT_IN_LOG10):
1948 errno_set = true; builtin_optab = log10_optab; break;
1949 CASE_FLT_FN (BUILT_IN_LOG2):
1950 errno_set = true; builtin_optab = log2_optab; break;
1951 CASE_FLT_FN (BUILT_IN_LOG1P):
1952 errno_set = true; builtin_optab = log1p_optab; break;
1953 CASE_FLT_FN (BUILT_IN_ASIN):
1954 builtin_optab = asin_optab; break;
1955 CASE_FLT_FN (BUILT_IN_ACOS):
1956 builtin_optab = acos_optab; break;
1957 CASE_FLT_FN (BUILT_IN_TAN):
1958 builtin_optab = tan_optab; break;
1959 CASE_FLT_FN (BUILT_IN_ATAN):
1960 builtin_optab = atan_optab; break;
1961 CASE_FLT_FN (BUILT_IN_FLOOR):
1962 builtin_optab = floor_optab; break;
1963 CASE_FLT_FN (BUILT_IN_CEIL):
1964 builtin_optab = ceil_optab; break;
1965 CASE_FLT_FN (BUILT_IN_TRUNC):
1966 builtin_optab = btrunc_optab; break;
1967 CASE_FLT_FN (BUILT_IN_ROUND):
1968 builtin_optab = round_optab; break;
1969 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1970 builtin_optab = nearbyint_optab;
1971 if (flag_trapping_math)
1973 /* Else fallthrough and expand as rint. */
1974 CASE_FLT_FN (BUILT_IN_RINT):
1975 builtin_optab = rint_optab; break;
1976 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1977 builtin_optab = significand_optab; break;
1982 /* Make a suitable register to place result in. */
1983 mode = TYPE_MODE (TREE_TYPE (exp));
1985 if (! flag_errno_math || ! HONOR_NANS (mode))
1988 /* Before working hard, check whether the instruction is available. */
1989 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1991 target = gen_reg_rtx (mode);
1993 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1994 need to expand the argument again. This way, we will not perform
1995 side-effects more the once. */
1996 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1998 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2002 /* Compute into TARGET.
2003 Set TARGET to wherever the result comes back. */
2004 target = expand_unop (mode, builtin_optab, op0, target, 0);
2009 expand_errno_check (exp, target);
2011 /* Output the entire sequence. */
2012 insns = get_insns ();
2018 /* If we were unable to expand via the builtin, stop the sequence
2019 (without outputting the insns) and call to the library function
2020 with the stabilized argument list. */
2024 return expand_call (exp, target, target == const0_rtx);
2027 /* Expand a call to the builtin binary math functions (pow and atan2).
2028 Return NULL_RTX if a normal call should be emitted rather than expanding the
2029 function in-line. EXP is the expression that is a call to the builtin
2030 function; if convenient, the result should be placed in TARGET.
2031 SUBTARGET may be used as the target for computing one of EXP's
2035 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2037 optab builtin_optab;
2038 rtx op0, op1, insns;
2039 int op1_type = REAL_TYPE;
2040 tree fndecl = get_callee_fndecl (exp);
2042 enum machine_mode mode;
2043 bool errno_set = true;
2045 switch (DECL_FUNCTION_CODE (fndecl))
2047 CASE_FLT_FN (BUILT_IN_SCALBN):
2048 CASE_FLT_FN (BUILT_IN_SCALBLN):
2049 CASE_FLT_FN (BUILT_IN_LDEXP):
2050 op1_type = INTEGER_TYPE;
2055 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2058 arg0 = CALL_EXPR_ARG (exp, 0);
2059 arg1 = CALL_EXPR_ARG (exp, 1);
2061 switch (DECL_FUNCTION_CODE (fndecl))
2063 CASE_FLT_FN (BUILT_IN_POW):
2064 builtin_optab = pow_optab; break;
2065 CASE_FLT_FN (BUILT_IN_ATAN2):
2066 builtin_optab = atan2_optab; break;
2067 CASE_FLT_FN (BUILT_IN_SCALB):
2068 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2070 builtin_optab = scalb_optab; break;
2071 CASE_FLT_FN (BUILT_IN_SCALBN):
2072 CASE_FLT_FN (BUILT_IN_SCALBLN):
2073 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2075 /* Fall through... */
2076 CASE_FLT_FN (BUILT_IN_LDEXP):
2077 builtin_optab = ldexp_optab; break;
2078 CASE_FLT_FN (BUILT_IN_FMOD):
2079 builtin_optab = fmod_optab; break;
2080 CASE_FLT_FN (BUILT_IN_REMAINDER):
2081 CASE_FLT_FN (BUILT_IN_DREM):
2082 builtin_optab = remainder_optab; break;
2087 /* Make a suitable register to place result in. */
2088 mode = TYPE_MODE (TREE_TYPE (exp));
2090 /* Before working hard, check whether the instruction is available. */
2091 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2094 target = gen_reg_rtx (mode);
2096 if (! flag_errno_math || ! HONOR_NANS (mode))
2099 /* Always stabilize the argument list. */
2100 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2101 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2103 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2104 op1 = expand_normal (arg1);
2108 /* Compute into TARGET.
2109 Set TARGET to wherever the result comes back. */
2110 target = expand_binop (mode, builtin_optab, op0, op1,
2111 target, 0, OPTAB_DIRECT);
2113 /* If we were unable to expand via the builtin, stop the sequence
2114 (without outputting the insns) and call to the library function
2115 with the stabilized argument list. */
2119 return expand_call (exp, target, target == const0_rtx);
2123 expand_errno_check (exp, target);
2125 /* Output the entire sequence. */
2126 insns = get_insns ();
2133 /* Expand a call to the builtin sin and cos math functions.
2134 Return NULL_RTX if a normal call should be emitted rather than expanding the
2135 function in-line. EXP is the expression that is a call to the builtin
2136 function; if convenient, the result should be placed in TARGET.
2137 SUBTARGET may be used as the target for computing one of EXP's
2141 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2143 optab builtin_optab;
2145 tree fndecl = get_callee_fndecl (exp);
2146 enum machine_mode mode;
2149 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2152 arg = CALL_EXPR_ARG (exp, 0);
2154 switch (DECL_FUNCTION_CODE (fndecl))
2156 CASE_FLT_FN (BUILT_IN_SIN):
2157 CASE_FLT_FN (BUILT_IN_COS):
2158 builtin_optab = sincos_optab; break;
2163 /* Make a suitable register to place result in. */
2164 mode = TYPE_MODE (TREE_TYPE (exp));
2166 /* Check if sincos insn is available, otherwise fallback
2167 to sin or cos insn. */
2168 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2169 switch (DECL_FUNCTION_CODE (fndecl))
2171 CASE_FLT_FN (BUILT_IN_SIN):
2172 builtin_optab = sin_optab; break;
2173 CASE_FLT_FN (BUILT_IN_COS):
2174 builtin_optab = cos_optab; break;
2179 /* Before working hard, check whether the instruction is available. */
2180 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2182 target = gen_reg_rtx (mode);
2184 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2185 need to expand the argument again. This way, we will not perform
2186 side-effects more the once. */
2187 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2189 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2193 /* Compute into TARGET.
2194 Set TARGET to wherever the result comes back. */
2195 if (builtin_optab == sincos_optab)
2199 switch (DECL_FUNCTION_CODE (fndecl))
2201 CASE_FLT_FN (BUILT_IN_SIN):
2202 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2204 CASE_FLT_FN (BUILT_IN_COS):
2205 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2210 gcc_assert (result);
2214 target = expand_unop (mode, builtin_optab, op0, target, 0);
2219 /* Output the entire sequence. */
2220 insns = get_insns ();
2226 /* If we were unable to expand via the builtin, stop the sequence
2227 (without outputting the insns) and call to the library function
2228 with the stabilized argument list. */
2232 target = expand_call (exp, target, target == const0_rtx);
2237 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2238 return an RTL instruction code that implements the functionality.
2239 If that isn't possible or available return CODE_FOR_nothing. */
2241 static enum insn_code
2242 interclass_mathfn_icode (tree arg, tree fndecl)
2244 bool errno_set = false;
2245 optab builtin_optab = 0;
2246 enum machine_mode mode;
2248 switch (DECL_FUNCTION_CODE (fndecl))
2250 CASE_FLT_FN (BUILT_IN_ILOGB):
2251 errno_set = true; builtin_optab = ilogb_optab; break;
2252 CASE_FLT_FN (BUILT_IN_ISINF):
2253 builtin_optab = isinf_optab; break;
2254 case BUILT_IN_ISNORMAL:
2255 case BUILT_IN_ISFINITE:
2256 CASE_FLT_FN (BUILT_IN_FINITE):
2257 case BUILT_IN_FINITED32:
2258 case BUILT_IN_FINITED64:
2259 case BUILT_IN_FINITED128:
2260 case BUILT_IN_ISINFD32:
2261 case BUILT_IN_ISINFD64:
2262 case BUILT_IN_ISINFD128:
2263 /* These builtins have no optabs (yet). */
2269 /* There's no easy way to detect the case we need to set EDOM. */
2270 if (flag_errno_math && errno_set)
2271 return CODE_FOR_nothing;
2273 /* Optab mode depends on the mode of the input argument. */
2274 mode = TYPE_MODE (TREE_TYPE (arg));
2277 return optab_handler (builtin_optab, mode)->insn_code;
2278 return CODE_FOR_nothing;
2281 /* Expand a call to one of the builtin math functions that operate on
2282 floating point argument and output an integer result (ilogb, isinf,
2284 Return 0 if a normal call should be emitted rather than expanding the
2285 function in-line. EXP is the expression that is a call to the builtin
2286 function; if convenient, the result should be placed in TARGET.
2287 SUBTARGET may be used as the target for computing one of EXP's operands. */
2290 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2292 enum insn_code icode = CODE_FOR_nothing;
2294 tree fndecl = get_callee_fndecl (exp);
2295 enum machine_mode mode;
2298 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2301 arg = CALL_EXPR_ARG (exp, 0);
2302 icode = interclass_mathfn_icode (arg, fndecl);
2303 mode = TYPE_MODE (TREE_TYPE (arg));
2305 if (icode != CODE_FOR_nothing)
2307 /* Make a suitable register to place result in. */
2309 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2310 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2312 gcc_assert (insn_data[icode].operand[0].predicate
2313 (target, GET_MODE (target)));
2315 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2316 need to expand the argument again. This way, we will not perform
2317 side-effects more the once. */
2318 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2320 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2322 if (mode != GET_MODE (op0))
2323 op0 = convert_to_mode (mode, op0, 0);
2325 /* Compute into TARGET.
2326 Set TARGET to wherever the result comes back. */
2327 emit_unop_insn (icode, target, op0, UNKNOWN);
2334 /* Expand a call to the builtin sincos math function.
2335 Return NULL_RTX if a normal call should be emitted rather than expanding the
2336 function in-line. EXP is the expression that is a call to the builtin
2340 expand_builtin_sincos (tree exp)
2342 rtx op0, op1, op2, target1, target2;
2343 enum machine_mode mode;
2344 tree arg, sinp, cosp;
2346 location_t loc = EXPR_LOCATION (exp);
2348 if (!validate_arglist (exp, REAL_TYPE,
2349 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2352 arg = CALL_EXPR_ARG (exp, 0);
2353 sinp = CALL_EXPR_ARG (exp, 1);
2354 cosp = CALL_EXPR_ARG (exp, 2);
2356 /* Make a suitable register to place result in. */
2357 mode = TYPE_MODE (TREE_TYPE (arg));
2359 /* Check if sincos insn is available, otherwise emit the call. */
2360 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2363 target1 = gen_reg_rtx (mode);
2364 target2 = gen_reg_rtx (mode);
2366 op0 = expand_normal (arg);
2367 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2368 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2370 /* Compute into target1 and target2.
2371 Set TARGET to wherever the result comes back. */
2372 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2373 gcc_assert (result);
2375 /* Move target1 and target2 to the memory locations indicated
2377 emit_move_insn (op1, target1);
2378 emit_move_insn (op2, target2);
2383 /* Expand a call to the internal cexpi builtin to the sincos math function.
2384 EXP is the expression that is a call to the builtin function; if convenient,
2385 the result should be placed in TARGET. SUBTARGET may be used as the target
2386 for computing one of EXP's operands. */
2389 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2391 tree fndecl = get_callee_fndecl (exp);
2393 enum machine_mode mode;
2395 location_t loc = EXPR_LOCATION (exp);
2397 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2400 arg = CALL_EXPR_ARG (exp, 0);
2401 type = TREE_TYPE (arg);
2402 mode = TYPE_MODE (TREE_TYPE (arg));
2404 /* Try expanding via a sincos optab, fall back to emitting a libcall
2405 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2406 is only generated from sincos, cexp or if we have either of them. */
2407 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2409 op1 = gen_reg_rtx (mode);
2410 op2 = gen_reg_rtx (mode);
2412 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2414 /* Compute into op1 and op2. */
2415 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2417 else if (TARGET_HAS_SINCOS)
2419 tree call, fn = NULL_TREE;
2423 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2424 fn = built_in_decls[BUILT_IN_SINCOSF];
2425 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2426 fn = built_in_decls[BUILT_IN_SINCOS];
2427 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2428 fn = built_in_decls[BUILT_IN_SINCOSL];
2432 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2433 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2434 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2435 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2436 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2437 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2439 /* Make sure not to fold the sincos call again. */
2440 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2441 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2442 call, 3, arg, top1, top2));
2446 tree call, fn = NULL_TREE, narg;
2447 tree ctype = build_complex_type (type);
2449 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2450 fn = built_in_decls[BUILT_IN_CEXPF];
2451 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2452 fn = built_in_decls[BUILT_IN_CEXP];
2453 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2454 fn = built_in_decls[BUILT_IN_CEXPL];
2458 /* If we don't have a decl for cexp create one. This is the
2459 friendliest fallback if the user calls __builtin_cexpi
2460 without full target C99 function support. */
2461 if (fn == NULL_TREE)
2464 const char *name = NULL;
2466 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2468 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2470 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2473 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2474 fn = build_fn_decl (name, fntype);
2477 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2478 build_real (type, dconst0), arg);
2480 /* Make sure not to fold the cexp call again. */
2481 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2482 return expand_expr (build_call_nary (ctype, call, 1, narg),
2483 target, VOIDmode, EXPAND_NORMAL);
2486 /* Now build the proper return type. */
2487 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2488 make_tree (TREE_TYPE (arg), op2),
2489 make_tree (TREE_TYPE (arg), op1)),
2490 target, VOIDmode, EXPAND_NORMAL);
2493 /* Conveniently construct a function call expression. FNDECL names the
2494 function to be called, N is the number of arguments, and the "..."
2495 parameters are the argument expressions. Unlike build_call_exr
2496 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2499 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2502 tree fntype = TREE_TYPE (fndecl);
2503 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2506 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2508 SET_EXPR_LOCATION (fn, loc);
2511 #define build_call_nofold(...) \
2512 build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
2514 /* Expand a call to one of the builtin rounding functions gcc defines
2515 as an extension (lfloor and lceil). As these are gcc extensions we
2516 do not need to worry about setting errno to EDOM.
2517 If expanding via optab fails, lower expression to (int)(floor(x)).
2518 EXP is the expression that is a call to the builtin function;
2519 if convenient, the result should be placed in TARGET. */
2522 expand_builtin_int_roundingfn (tree exp, rtx target)
2524 convert_optab builtin_optab;
2525 rtx op0, insns, tmp;
2526 tree fndecl = get_callee_fndecl (exp);
2527 enum built_in_function fallback_fn;
2528 tree fallback_fndecl;
2529 enum machine_mode mode;
2532 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2535 arg = CALL_EXPR_ARG (exp, 0);
2537 switch (DECL_FUNCTION_CODE (fndecl))
2539 CASE_FLT_FN (BUILT_IN_LCEIL):
2540 CASE_FLT_FN (BUILT_IN_LLCEIL):
2541 builtin_optab = lceil_optab;
2542 fallback_fn = BUILT_IN_CEIL;
2545 CASE_FLT_FN (BUILT_IN_LFLOOR):
2546 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2547 builtin_optab = lfloor_optab;
2548 fallback_fn = BUILT_IN_FLOOR;
2555 /* Make a suitable register to place result in. */
2556 mode = TYPE_MODE (TREE_TYPE (exp));
2558 target = gen_reg_rtx (mode);
2560 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2561 need to expand the argument again. This way, we will not perform
2562 side-effects more the once. */
2563 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2565 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2569 /* Compute into TARGET. */
2570 if (expand_sfix_optab (target, op0, builtin_optab))
2572 /* Output the entire sequence. */
2573 insns = get_insns ();
2579 /* If we were unable to expand via the builtin, stop the sequence
2580 (without outputting the insns). */
2583 /* Fall back to floating point rounding optab. */
2584 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2586 /* For non-C99 targets we may end up without a fallback fndecl here
2587 if the user called __builtin_lfloor directly. In this case emit
2588 a call to the floor/ceil variants nevertheless. This should result
2589 in the best user experience for not full C99 targets. */
2590 if (fallback_fndecl == NULL_TREE)
2593 const char *name = NULL;
2595 switch (DECL_FUNCTION_CODE (fndecl))
2597 case BUILT_IN_LCEIL:
2598 case BUILT_IN_LLCEIL:
2601 case BUILT_IN_LCEILF:
2602 case BUILT_IN_LLCEILF:
2605 case BUILT_IN_LCEILL:
2606 case BUILT_IN_LLCEILL:
2609 case BUILT_IN_LFLOOR:
2610 case BUILT_IN_LLFLOOR:
2613 case BUILT_IN_LFLOORF:
2614 case BUILT_IN_LLFLOORF:
2617 case BUILT_IN_LFLOORL:
2618 case BUILT_IN_LLFLOORL:
2625 fntype = build_function_type_list (TREE_TYPE (arg),
2626 TREE_TYPE (arg), NULL_TREE);
2627 fallback_fndecl = build_fn_decl (name, fntype);
2630 exp = build_call_nofold (fallback_fndecl, 1, arg);
2632 tmp = expand_normal (exp);
2634 /* Truncate the result of floating point optab to integer
2635 via expand_fix (). */
2636 target = gen_reg_rtx (mode);
2637 expand_fix (target, tmp, 0);
2642 /* Expand a call to one of the builtin math functions doing integer
2644 Return 0 if a normal call should be emitted rather than expanding the
2645 function in-line. EXP is the expression that is a call to the builtin
2646 function; if convenient, the result should be placed in TARGET. */
2649 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2651 convert_optab builtin_optab;
2653 tree fndecl = get_callee_fndecl (exp);
2655 enum machine_mode mode;
2657 /* There's no easy way to detect the case we need to set EDOM. */
2658 if (flag_errno_math)
2661 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2664 arg = CALL_EXPR_ARG (exp, 0);
2666 switch (DECL_FUNCTION_CODE (fndecl))
2668 CASE_FLT_FN (BUILT_IN_LRINT):
2669 CASE_FLT_FN (BUILT_IN_LLRINT):
2670 builtin_optab = lrint_optab; break;
2671 CASE_FLT_FN (BUILT_IN_LROUND):
2672 CASE_FLT_FN (BUILT_IN_LLROUND):
2673 builtin_optab = lround_optab; break;
2678 /* Make a suitable register to place result in. */
2679 mode = TYPE_MODE (TREE_TYPE (exp));
2681 target = gen_reg_rtx (mode);
2683 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2684 need to expand the argument again. This way, we will not perform
2685 side-effects more the once. */
2686 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2688 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2692 if (expand_sfix_optab (target, op0, builtin_optab))
2694 /* Output the entire sequence. */
2695 insns = get_insns ();
2701 /* If we were unable to expand via the builtin, stop the sequence
2702 (without outputting the insns) and call to the library function
2703 with the stabilized argument list. */
2706 target = expand_call (exp, target, target == const0_rtx);
2711 /* To evaluate powi(x,n), the floating point value x raised to the
2712 constant integer exponent n, we use a hybrid algorithm that
2713 combines the "window method" with look-up tables. For an
2714 introduction to exponentiation algorithms and "addition chains",
2715 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2716 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2717 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2718 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2720 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2721 multiplications to inline before calling the system library's pow
2722 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2723 so this default never requires calling pow, powf or powl. */
2725 #ifndef POWI_MAX_MULTS
2726 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2729 /* The size of the "optimal power tree" lookup table. All
2730 exponents less than this value are simply looked up in the
2731 powi_table below. This threshold is also used to size the
2732 cache of pseudo registers that hold intermediate results. */
2733 #define POWI_TABLE_SIZE 256
2735 /* The size, in bits of the window, used in the "window method"
2736 exponentiation algorithm. This is equivalent to a radix of
2737 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2738 #define POWI_WINDOW_SIZE 3
2740 /* The following table is an efficient representation of an
2741 "optimal power tree". For each value, i, the corresponding
2742 value, j, in the table states than an optimal evaluation
2743 sequence for calculating pow(x,i) can be found by evaluating
2744 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2745 100 integers is given in Knuth's "Seminumerical algorithms". */
2747 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2749 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2750 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2751 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2752 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2753 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2754 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2755 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2756 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2757 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2758 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2759 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2760 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2761 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2762 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2763 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2764 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2765 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2766 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2767 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2768 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2769 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2770 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2771 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2772 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2773 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2774 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2775 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2776 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2777 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2778 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2779 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2780 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2784 /* Return the number of multiplications required to calculate
2785 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2786 subroutine of powi_cost. CACHE is an array indicating
2787 which exponents have already been calculated. */
2790 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2792 /* If we've already calculated this exponent, then this evaluation
2793 doesn't require any additional multiplications. */
2798 return powi_lookup_cost (n - powi_table[n], cache)
2799 + powi_lookup_cost (powi_table[n], cache) + 1;
2802 /* Return the number of multiplications required to calculate
2803 powi(x,n) for an arbitrary x, given the exponent N. This
2804 function needs to be kept in sync with expand_powi below. */
2807 powi_cost (HOST_WIDE_INT n)
2809 bool cache[POWI_TABLE_SIZE];
2810 unsigned HOST_WIDE_INT digit;
2811 unsigned HOST_WIDE_INT val;
2817 /* Ignore the reciprocal when calculating the cost. */
2818 val = (n < 0) ? -n : n;
2820 /* Initialize the exponent cache. */
2821 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2826 while (val >= POWI_TABLE_SIZE)
2830 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2831 result += powi_lookup_cost (digit, cache)
2832 + POWI_WINDOW_SIZE + 1;
2833 val >>= POWI_WINDOW_SIZE;
2842 return result + powi_lookup_cost (val, cache);
2845 /* Recursive subroutine of expand_powi. This function takes the array,
2846 CACHE, of already calculated exponents and an exponent N and returns
2847 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2850 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2852 unsigned HOST_WIDE_INT digit;
2856 if (n < POWI_TABLE_SIZE)
2861 target = gen_reg_rtx (mode);
2864 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2865 op1 = expand_powi_1 (mode, powi_table[n], cache);
2869 target = gen_reg_rtx (mode);
2870 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2871 op0 = expand_powi_1 (mode, n - digit, cache);
2872 op1 = expand_powi_1 (mode, digit, cache);
2876 target = gen_reg_rtx (mode);
2877 op0 = expand_powi_1 (mode, n >> 1, cache);
2881 result = expand_mult (mode, op0, op1, target, 0);
2882 if (result != target)
2883 emit_move_insn (target, result);
2887 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2888 floating point operand in mode MODE, and N is the exponent. This
2889 function needs to be kept in sync with powi_cost above. */
2892 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2894 rtx cache[POWI_TABLE_SIZE];
2898 return CONST1_RTX (mode);
2900 memset (cache, 0, sizeof (cache));
2903 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2905 /* If the original exponent was negative, reciprocate the result. */
2907 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2908 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2913 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2914 a normal call should be emitted rather than expanding the function
2915 in-line. EXP is the expression that is a call to the builtin
2916 function; if convenient, the result should be placed in TARGET. */
2919 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2923 tree type = TREE_TYPE (exp);
2924 REAL_VALUE_TYPE cint, c, c2;
2927 enum machine_mode mode = TYPE_MODE (type);
2929 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2932 arg0 = CALL_EXPR_ARG (exp, 0);
2933 arg1 = CALL_EXPR_ARG (exp, 1);
2935 if (TREE_CODE (arg1) != REAL_CST
2936 || TREE_OVERFLOW (arg1))
2937 return expand_builtin_mathfn_2 (exp, target, subtarget);
2939 /* Handle constant exponents. */
2941 /* For integer valued exponents we can expand to an optimal multiplication
2942 sequence using expand_powi. */
2943 c = TREE_REAL_CST (arg1);
2944 n = real_to_integer (&c);
2945 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2946 if (real_identical (&c, &cint)
2947 && ((n >= -1 && n <= 2)
2948 || (flag_unsafe_math_optimizations
2949 && optimize_insn_for_speed_p ()
2950 && powi_cost (n) <= POWI_MAX_MULTS)))
2952 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2955 op = force_reg (mode, op);
2956 op = expand_powi (op, mode, n);
2961 narg0 = builtin_save_expr (arg0);
2963 /* If the exponent is not integer valued, check if it is half of an integer.
2964 In this case we can expand to sqrt (x) * x**(n/2). */
2965 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2966 if (fn != NULL_TREE)
2968 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2969 n = real_to_integer (&c2);
2970 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2971 if (real_identical (&c2, &cint)
2972 && ((flag_unsafe_math_optimizations
2973 && optimize_insn_for_speed_p ()
2974 && powi_cost (n/2) <= POWI_MAX_MULTS)
2977 tree call_expr = build_call_nofold (fn, 1, narg0);
2978 /* Use expand_expr in case the newly built call expression
2979 was folded to a non-call. */
2980 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2983 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2984 op2 = force_reg (mode, op2);
2985 op2 = expand_powi (op2, mode, abs (n / 2));
2986 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2987 0, OPTAB_LIB_WIDEN);
2988 /* If the original exponent was negative, reciprocate the
2991 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2992 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2998 /* Try if the exponent is a third of an integer. In this case
2999 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3000 different from pow (x, 1./3.) due to rounding and behavior
3001 with negative x we need to constrain this transformation to
3002 unsafe math and positive x or finite math. */
3003 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3005 && flag_unsafe_math_optimizations
3006 && (tree_expr_nonnegative_p (arg0)
3007 || !HONOR_NANS (mode)))
3009 REAL_VALUE_TYPE dconst3;
3010 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3011 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3012 real_round (&c2, mode, &c2);
3013 n = real_to_integer (&c2);
3014 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3015 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3016 real_convert (&c2, mode, &c2);
3017 if (real_identical (&c2, &c)
3018 && ((optimize_insn_for_speed_p ()
3019 && powi_cost (n/3) <= POWI_MAX_MULTS)
3022 tree call_expr = build_call_nofold (fn, 1,narg0);
3023 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3024 if (abs (n) % 3 == 2)
3025 op = expand_simple_binop (mode, MULT, op, op, op,
3026 0, OPTAB_LIB_WIDEN);
3029 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3030 op2 = force_reg (mode, op2);
3031 op2 = expand_powi (op2, mode, abs (n / 3));
3032 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3033 0, OPTAB_LIB_WIDEN);
3034 /* If the original exponent was negative, reciprocate the
3037 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3038 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3044 /* Fall back to optab expansion. */
3045 return expand_builtin_mathfn_2 (exp, target, subtarget);
3048 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3049 a normal call should be emitted rather than expanding the function
3050 in-line. EXP is the expression that is a call to the builtin
3051 function; if convenient, the result should be placed in TARGET. */
3054 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3058 enum machine_mode mode;
3059 enum machine_mode mode2;
3061 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3064 arg0 = CALL_EXPR_ARG (exp, 0);
3065 arg1 = CALL_EXPR_ARG (exp, 1);
3066 mode = TYPE_MODE (TREE_TYPE (exp));
3068 /* Handle constant power. */
3070 if (TREE_CODE (arg1) == INTEGER_CST
3071 && !TREE_OVERFLOW (arg1))
3073 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3075 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3076 Otherwise, check the number of multiplications required. */
3077 if ((TREE_INT_CST_HIGH (arg1) == 0
3078 || TREE_INT_CST_HIGH (arg1) == -1)
3079 && ((n >= -1 && n <= 2)
3080 || (optimize_insn_for_speed_p ()
3081 && powi_cost (n) <= POWI_MAX_MULTS)))
3083 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3084 op0 = force_reg (mode, op0);
3085 return expand_powi (op0, mode, n);
3089 /* Emit a libcall to libgcc. */
3091 /* Mode of the 2nd argument must match that of an int. */
3092 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3094 if (target == NULL_RTX)
3095 target = gen_reg_rtx (mode);
3097 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3098 if (GET_MODE (op0) != mode)
3099 op0 = convert_to_mode (mode, op0, 0);
3100 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3101 if (GET_MODE (op1) != mode2)
3102 op1 = convert_to_mode (mode2, op1, 0);
3104 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3105 target, LCT_CONST, mode, 2,
3106 op0, mode, op1, mode2);
3111 /* Expand expression EXP which is a call to the strlen builtin. Return
3112 NULL_RTX if we failed the caller should emit a normal call, otherwise
3113 try to get the result in TARGET, if convenient. */
3116 expand_builtin_strlen (tree exp, rtx target,
3117 enum machine_mode target_mode)
3119 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3125 tree src = CALL_EXPR_ARG (exp, 0);
3126 rtx result, src_reg, char_rtx, before_strlen;
3127 enum machine_mode insn_mode = target_mode, char_mode;
3128 enum insn_code icode = CODE_FOR_nothing;
3131 /* If the length can be computed at compile-time, return it. */
3132 len = c_strlen (src, 0);
3134 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3136 /* If the length can be computed at compile-time and is constant
3137 integer, but there are side-effects in src, evaluate
3138 src for side-effects, then return len.
3139 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3140 can be optimized into: i++; x = 3; */
3141 len = c_strlen (src, 1);
3142 if (len && TREE_CODE (len) == INTEGER_CST)
3144 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3145 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3148 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3150 /* If SRC is not a pointer type, don't do this operation inline. */
3154 /* Bail out if we can't compute strlen in the right mode. */
3155 while (insn_mode != VOIDmode)
3157 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3158 if (icode != CODE_FOR_nothing)
3161 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3163 if (insn_mode == VOIDmode)
3166 /* Make a place to write the result of the instruction. */
3170 && GET_MODE (result) == insn_mode
3171 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3172 result = gen_reg_rtx (insn_mode);
3174 /* Make a place to hold the source address. We will not expand
3175 the actual source until we are sure that the expansion will
3176 not fail -- there are trees that cannot be expanded twice. */
3177 src_reg = gen_reg_rtx (Pmode);
3179 /* Mark the beginning of the strlen sequence so we can emit the
3180 source operand later. */
3181 before_strlen = get_last_insn ();
3183 char_rtx = const0_rtx;
3184 char_mode = insn_data[(int) icode].operand[2].mode;
3185 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3187 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3189 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3190 char_rtx, GEN_INT (align));
3195 /* Now that we are assured of success, expand the source. */
3197 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3199 emit_move_insn (src_reg, pat);
3204 emit_insn_after (pat, before_strlen);
3206 emit_insn_before (pat, get_insns ());
3208 /* Return the value in the proper mode for this function. */
3209 if (GET_MODE (result) == target_mode)
3211 else if (target != 0)
3212 convert_move (target, result, 0);
3214 target = convert_to_mode (target_mode, result, 0);
3220 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3221 bytes from constant string DATA + OFFSET and return it as target
3225 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3226 enum machine_mode mode)
3228 const char *str = (const char *) data;
3230 gcc_assert (offset >= 0
3231 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3232 <= strlen (str) + 1));
3234 return c_readstr (str + offset, mode);
3237 /* Expand a call EXP to the memcpy builtin.
3238 Return NULL_RTX if we failed, the caller should emit a normal call,
3239 otherwise try to get the result in TARGET, if convenient (and in
3240 mode MODE if that's convenient). */
3243 expand_builtin_memcpy (tree exp, rtx target)
3245 if (!validate_arglist (exp,
3246 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3250 tree dest = CALL_EXPR_ARG (exp, 0);
3251 tree src = CALL_EXPR_ARG (exp, 1);
3252 tree len = CALL_EXPR_ARG (exp, 2);
3253 const char *src_str;
3254 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3255 unsigned int dest_align
3256 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3257 rtx dest_mem, src_mem, dest_addr, len_rtx;
3258 HOST_WIDE_INT expected_size = -1;
3259 unsigned int expected_align = 0;
3261 /* If DEST is not a pointer type, call the normal function. */
3262 if (dest_align == 0)
3265 /* If either SRC is not a pointer type, don't do this
3266 operation in-line. */
3270 if (currently_expanding_gimple_stmt)
3271 stringop_block_profile (currently_expanding_gimple_stmt,
3272 &expected_align, &expected_size);
3274 if (expected_align < dest_align)
3275 expected_align = dest_align;
3276 dest_mem = get_memory_rtx (dest, len);
3277 set_mem_align (dest_mem, dest_align);
3278 len_rtx = expand_normal (len);
3279 src_str = c_getstr (src);
3281 /* If SRC is a string constant and block move would be done
3282 by pieces, we can avoid loading the string from memory
3283 and only stored the computed constants. */
3285 && CONST_INT_P (len_rtx)
3286 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3287 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3288 CONST_CAST (char *, src_str),
3291 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3292 builtin_memcpy_read_str,
3293 CONST_CAST (char *, src_str),
3294 dest_align, false, 0);
3295 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3296 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3300 src_mem = get_memory_rtx (src, len);
3301 set_mem_align (src_mem, src_align);
3303 /* Copy word part most expediently. */
3304 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3305 CALL_EXPR_TAILCALL (exp)
3306 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3307 expected_align, expected_size);
3311 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3312 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3318 /* Expand a call EXP to the mempcpy builtin.
3319 Return NULL_RTX if we failed; the caller should emit a normal call,
3320 otherwise try to get the result in TARGET, if convenient (and in
3321 mode MODE if that's convenient). If ENDP is 0 return the
3322 destination pointer, if ENDP is 1 return the end pointer ala
3323 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3327 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3329 if (!validate_arglist (exp,
3330 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3334 tree dest = CALL_EXPR_ARG (exp, 0);
3335 tree src = CALL_EXPR_ARG (exp, 1);
3336 tree len = CALL_EXPR_ARG (exp, 2);
3337 return expand_builtin_mempcpy_args (dest, src, len,
3338 target, mode, /*endp=*/ 1);
3342 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3343 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3344 so that this can also be called without constructing an actual CALL_EXPR.
3345 The other arguments and return value are the same as for
3346 expand_builtin_mempcpy. */
3349 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3350 rtx target, enum machine_mode mode, int endp)
3352 /* If return value is ignored, transform mempcpy into memcpy. */
3353 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3355 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3356 tree result = build_call_nofold (fn, 3, dest, src, len);
3357 return expand_expr (result, target, mode, EXPAND_NORMAL);
3361 const char *src_str;
3362 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3363 unsigned int dest_align
3364 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3365 rtx dest_mem, src_mem, len_rtx;
3367 /* If either SRC or DEST is not a pointer type, don't do this
3368 operation in-line. */
3369 if (dest_align == 0 || src_align == 0)
3372 /* If LEN is not constant, call the normal function. */
3373 if (! host_integerp (len, 1))
3376 len_rtx = expand_normal (len);
3377 src_str = c_getstr (src);
3379 /* If SRC is a string constant and block move would be done
3380 by pieces, we can avoid loading the string from memory
3381 and only stored the computed constants. */
3383 && CONST_INT_P (len_rtx)
3384 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3385 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3386 CONST_CAST (char *, src_str),
3389 dest_mem = get_memory_rtx (dest, len);
3390 set_mem_align (dest_mem, dest_align);
3391 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3392 builtin_memcpy_read_str,
3393 CONST_CAST (char *, src_str),
3394 dest_align, false, endp);
3395 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3396 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3400 if (CONST_INT_P (len_rtx)
3401 && can_move_by_pieces (INTVAL (len_rtx),
3402 MIN (dest_align, src_align)))
3404 dest_mem = get_memory_rtx (dest, len);
3405 set_mem_align (dest_mem, dest_align);
3406 src_mem = get_memory_rtx (src, len);
3407 set_mem_align (src_mem, src_align);
3408 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3409 MIN (dest_align, src_align), endp);
3410 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3411 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3420 # define HAVE_movstr 0
3421 # define CODE_FOR_movstr CODE_FOR_nothing
3424 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3425 we failed, the caller should emit a normal call, otherwise try to
3426 get the result in TARGET, if convenient. If ENDP is 0 return the
3427 destination pointer, if ENDP is 1 return the end pointer ala
3428 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3432 expand_movstr (tree dest, tree src, rtx target, int endp)
3438 const struct insn_data * data;
3443 dest_mem = get_memory_rtx (dest, NULL);
3444 src_mem = get_memory_rtx (src, NULL);
3447 target = force_reg (Pmode, XEXP (dest_mem, 0));
3448 dest_mem = replace_equiv_address (dest_mem, target);
3449 end = gen_reg_rtx (Pmode);
3453 if (target == 0 || target == const0_rtx)
3455 end = gen_reg_rtx (Pmode);
3463 data = insn_data + CODE_FOR_movstr;
3465 if (data->operand[0].mode != VOIDmode)
3466 end = gen_lowpart (data->operand[0].mode, end);
3468 insn = data->genfun (end, dest_mem, src_mem);
3474 /* movstr is supposed to set end to the address of the NUL
3475 terminator. If the caller requested a mempcpy-like return value,
3477 if (endp == 1 && target != const0_rtx)
3479 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3480 emit_move_insn (target, force_operand (tem, NULL_RTX));
3486 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3487 NULL_RTX if we failed the caller should emit a normal call, otherwise
3488 try to get the result in TARGET, if convenient (and in mode MODE if that's
3492 expand_builtin_strcpy (tree exp, rtx target)
3494 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3496 tree dest = CALL_EXPR_ARG (exp, 0);
3497 tree src = CALL_EXPR_ARG (exp, 1);
3498 return expand_builtin_strcpy_args (dest, src, target);
3503 /* Helper function to do the actual work for expand_builtin_strcpy. The
3504 arguments to the builtin_strcpy call DEST and SRC are broken out
3505 so that this can also be called without constructing an actual CALL_EXPR.
3506 The other arguments and return value are the same as for
3507 expand_builtin_strcpy. */
3510 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3512 return expand_movstr (dest, src, target, /*endp=*/0);
3515 /* Expand a call EXP to the stpcpy builtin.
3516 Return NULL_RTX if we failed the caller should emit a normal call,
3517 otherwise try to get the result in TARGET, if convenient (and in
3518 mode MODE if that's convenient). */
3521 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3524 location_t loc = EXPR_LOCATION (exp);
3526 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3529 dst = CALL_EXPR_ARG (exp, 0);
3530 src = CALL_EXPR_ARG (exp, 1);
3532 /* If return value is ignored, transform stpcpy into strcpy. */
3533 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3535 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3536 tree result = build_call_nofold (fn, 2, dst, src);
3537 return expand_expr (result, target, mode, EXPAND_NORMAL);
3544 /* Ensure we get an actual string whose length can be evaluated at
3545 compile-time, not an expression containing a string. This is
3546 because the latter will potentially produce pessimized code
3547 when used to produce the return value. */
3548 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3549 return expand_movstr (dst, src, target, /*endp=*/2);
3551 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3552 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3553 target, mode, /*endp=*/2);
3558 if (TREE_CODE (len) == INTEGER_CST)
3560 rtx len_rtx = expand_normal (len);
3562 if (CONST_INT_P (len_rtx))
3564 ret = expand_builtin_strcpy_args (dst, src, target);
3570 if (mode != VOIDmode)
3571 target = gen_reg_rtx (mode);
3573 target = gen_reg_rtx (GET_MODE (ret));
3575 if (GET_MODE (target) != GET_MODE (ret))
3576 ret = gen_lowpart (GET_MODE (target), ret);
3578 ret = plus_constant (ret, INTVAL (len_rtx));
3579 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3587 return expand_movstr (dst, src, target, /*endp=*/2);
3591 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3592 bytes from constant string DATA + OFFSET and return it as target
3596 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3597 enum machine_mode mode)
3599 const char *str = (const char *) data;
3601 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3604 return c_readstr (str + offset, mode);
3607 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3608 NULL_RTX if we failed the caller should emit a normal call. */
3611 expand_builtin_strncpy (tree exp, rtx target)
3613 location_t loc = EXPR_LOCATION (exp);
3615 if (validate_arglist (exp,
3616 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3618 tree dest = CALL_EXPR_ARG (exp, 0);
3619 tree src = CALL_EXPR_ARG (exp, 1);
3620 tree len = CALL_EXPR_ARG (exp, 2);
3621 tree slen = c_strlen (src, 1);
3623 /* We must be passed a constant len and src parameter. */
3624 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3627 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3629 /* We're required to pad with trailing zeros if the requested
3630 len is greater than strlen(s2)+1. In that case try to
3631 use store_by_pieces, if it fails, punt. */
3632 if (tree_int_cst_lt (slen, len))
3634 unsigned int dest_align
3635 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3636 const char *p = c_getstr (src);
3639 if (!p || dest_align == 0 || !host_integerp (len, 1)
3640 || !can_store_by_pieces (tree_low_cst (len, 1),
3641 builtin_strncpy_read_str,
3642 CONST_CAST (char *, p),
3646 dest_mem = get_memory_rtx (dest, len);
3647 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3648 builtin_strncpy_read_str,
3649 CONST_CAST (char *, p), dest_align, false, 0);
3650 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3651 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3658 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3659 bytes from constant string DATA + OFFSET and return it as target
3663 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3664 enum machine_mode mode)
3666 const char *c = (const char *) data;
3667 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3669 memset (p, *c, GET_MODE_SIZE (mode));
3671 return c_readstr (p, mode);
3674 /* Callback routine for store_by_pieces. Return the RTL of a register
3675 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3676 char value given in the RTL register data. For example, if mode is
3677 4 bytes wide, return the RTL for 0x01010101*data. */
3680 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3681 enum machine_mode mode)
3687 size = GET_MODE_SIZE (mode);
3691 p = XALLOCAVEC (char, size);
3692 memset (p, 1, size);
3693 coeff = c_readstr (p, mode);
3695 target = convert_to_mode (mode, (rtx) data, 1);
3696 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3697 return force_reg (mode, target);
3700 /* Expand expression EXP, which is a call to the memset builtin. Return
3701 NULL_RTX if we failed the caller should emit a normal call, otherwise
3702 try to get the result in TARGET, if convenient (and in mode MODE if that's
3706 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3708 if (!validate_arglist (exp,
3709 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3713 tree dest = CALL_EXPR_ARG (exp, 0);
3714 tree val = CALL_EXPR_ARG (exp, 1);
3715 tree len = CALL_EXPR_ARG (exp, 2);
3716 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3720 /* Helper function to do the actual work for expand_builtin_memset. The
3721 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3722 so that this can also be called without constructing an actual CALL_EXPR.
3723 The other arguments and return value are the same as for
3724 expand_builtin_memset. */
3727 expand_builtin_memset_args (tree dest, tree val, tree len,
3728 rtx target, enum machine_mode mode, tree orig_exp)
3731 enum built_in_function fcode;
3733 unsigned int dest_align;
3734 rtx dest_mem, dest_addr, len_rtx;
3735 HOST_WIDE_INT expected_size = -1;
3736 unsigned int expected_align = 0;
3738 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3740 /* If DEST is not a pointer type, don't do this operation in-line. */
3741 if (dest_align == 0)
3744 if (currently_expanding_gimple_stmt)
3745 stringop_block_profile (currently_expanding_gimple_stmt,
3746 &expected_align, &expected_size);
3748 if (expected_align < dest_align)
3749 expected_align = dest_align;
3751 /* If the LEN parameter is zero, return DEST. */
3752 if (integer_zerop (len))
3754 /* Evaluate and ignore VAL in case it has side-effects. */
3755 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3756 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3759 /* Stabilize the arguments in case we fail. */
3760 dest = builtin_save_expr (dest);
3761 val = builtin_save_expr (val);
3762 len = builtin_save_expr (len);
3764 len_rtx = expand_normal (len);
3765 dest_mem = get_memory_rtx (dest, len);
3767 if (TREE_CODE (val) != INTEGER_CST)
3771 val_rtx = expand_normal (val);
3772 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3775 /* Assume that we can memset by pieces if we can store
3776 * the coefficients by pieces (in the required modes).
3777 * We can't pass builtin_memset_gen_str as that emits RTL. */
3779 if (host_integerp (len, 1)
3780 && can_store_by_pieces (tree_low_cst (len, 1),
3781 builtin_memset_read_str, &c, dest_align,
3784 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3786 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3787 builtin_memset_gen_str, val_rtx, dest_align,
3790 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3791 dest_align, expected_align,
3795 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3796 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3800 if (target_char_cast (val, &c))
3805 if (host_integerp (len, 1)
3806 && can_store_by_pieces (tree_low_cst (len, 1),
3807 builtin_memset_read_str, &c, dest_align,
3809 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3810 builtin_memset_read_str, &c, dest_align, true, 0);
3811 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3812 dest_align, expected_align,
3816 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3817 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3821 set_mem_align (dest_mem, dest_align);
3822 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3823 CALL_EXPR_TAILCALL (orig_exp)
3824 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3825 expected_align, expected_size);
3829 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3830 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3836 fndecl = get_callee_fndecl (orig_exp);
3837 fcode = DECL_FUNCTION_CODE (fndecl);
3838 if (fcode == BUILT_IN_MEMSET)
3839 fn = build_call_nofold (fndecl, 3, dest, val, len);
3840 else if (fcode == BUILT_IN_BZERO)
3841 fn = build_call_nofold (fndecl, 2, dest, len);
3844 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3845 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3846 return expand_call (fn, target, target == const0_rtx);
3849 /* Expand expression EXP, which is a call to the bzero builtin. Return
3850 NULL_RTX if we failed the caller should emit a normal call. */
3853 expand_builtin_bzero (tree exp)
3856 location_t loc = EXPR_LOCATION (exp);
3858 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3861 dest = CALL_EXPR_ARG (exp, 0);
3862 size = CALL_EXPR_ARG (exp, 1);
3864 /* New argument list transforming bzero(ptr x, int y) to
3865 memset(ptr x, int 0, size_t y). This is done this way
3866 so that if it isn't expanded inline, we fallback to
3867 calling bzero instead of memset. */
3869 return expand_builtin_memset_args (dest, integer_zero_node,
3870 fold_convert_loc (loc, sizetype, size),
3871 const0_rtx, VOIDmode, exp);
3874 /* Expand expression EXP, which is a call to the memcmp built-in function.
3875 Return NULL_RTX if we failed and the
3876 caller should emit a normal call, otherwise try to get the result in
3877 TARGET, if convenient (and in mode MODE, if that's convenient). */
3880 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3881 ATTRIBUTE_UNUSED enum machine_mode mode)
3883 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3885 if (!validate_arglist (exp,
3886 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3889 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3891 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3894 tree arg1 = CALL_EXPR_ARG (exp, 0);
3895 tree arg2 = CALL_EXPR_ARG (exp, 1);
3896 tree len = CALL_EXPR_ARG (exp, 2);
3899 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3901 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3902 enum machine_mode insn_mode;
3904 #ifdef HAVE_cmpmemsi
3906 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3909 #ifdef HAVE_cmpstrnsi
3911 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3916 /* If we don't have POINTER_TYPE, call the function. */
3917 if (arg1_align == 0 || arg2_align == 0)
3920 /* Make a place to write the result of the instruction. */
3923 && REG_P (result) && GET_MODE (result) == insn_mode
3924 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3925 result = gen_reg_rtx (insn_mode);
3927 arg1_rtx = get_memory_rtx (arg1, len);
3928 arg2_rtx = get_memory_rtx (arg2, len);
3929 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3931 /* Set MEM_SIZE as appropriate. */
3932 if (CONST_INT_P (arg3_rtx))
3934 set_mem_size (arg1_rtx, arg3_rtx);
3935 set_mem_size (arg2_rtx, arg3_rtx);
3938 #ifdef HAVE_cmpmemsi
3940 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3941 GEN_INT (MIN (arg1_align, arg2_align)));
3944 #ifdef HAVE_cmpstrnsi
3946 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3947 GEN_INT (MIN (arg1_align, arg2_align)));
3955 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3956 TYPE_MODE (integer_type_node), 3,
3957 XEXP (arg1_rtx, 0), Pmode,
3958 XEXP (arg2_rtx, 0), Pmode,
3959 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3960 TYPE_UNSIGNED (sizetype)),
3961 TYPE_MODE (sizetype));
3963 /* Return the value in the proper mode for this function. */
3964 mode = TYPE_MODE (TREE_TYPE (exp));
3965 if (GET_MODE (result) == mode)
3967 else if (target != 0)
3969 convert_move (target, result, 0);
3973 return convert_to_mode (mode, result, 0);
3980 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3981 if we failed the caller should emit a normal call, otherwise try to get
3982 the result in TARGET, if convenient. */
3985 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3987 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3990 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3991 if (cmpstr_optab[SImode] != CODE_FOR_nothing
3992 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
3994 rtx arg1_rtx, arg2_rtx;
3995 rtx result, insn = NULL_RTX;
3997 tree arg1 = CALL_EXPR_ARG (exp, 0);
3998 tree arg2 = CALL_EXPR_ARG (exp, 1);
4001 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4003 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4005 /* If we don't have POINTER_TYPE, call the function. */
4006 if (arg1_align == 0 || arg2_align == 0)
4009 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4010 arg1 = builtin_save_expr (arg1);
4011 arg2 = builtin_save_expr (arg2);
4013 arg1_rtx = get_memory_rtx (arg1, NULL);
4014 arg2_rtx = get_memory_rtx (arg2, NULL);
4016 #ifdef HAVE_cmpstrsi
4017 /* Try to call cmpstrsi. */
4020 enum machine_mode insn_mode
4021 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4023 /* Make a place to write the result of the instruction. */
4026 && REG_P (result) && GET_MODE (result) == insn_mode
4027 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4028 result = gen_reg_rtx (insn_mode);
4030 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4031 GEN_INT (MIN (arg1_align, arg2_align)));
4034 #ifdef HAVE_cmpstrnsi
4035 /* Try to determine at least one length and call cmpstrnsi. */
4036 if (!insn && HAVE_cmpstrnsi)
4041 enum machine_mode insn_mode
4042 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4043 tree len1 = c_strlen (arg1, 1);
4044 tree len2 = c_strlen (arg2, 1);
4047 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4049 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4051 /* If we don't have a constant length for the first, use the length
4052 of the second, if we know it. We don't require a constant for
4053 this case; some cost analysis could be done if both are available
4054 but neither is constant. For now, assume they're equally cheap,
4055 unless one has side effects. If both strings have constant lengths,
4062 else if (TREE_SIDE_EFFECTS (len1))
4064 else if (TREE_SIDE_EFFECTS (len2))
4066 else if (TREE_CODE (len1) != INTEGER_CST)
4068 else if (TREE_CODE (len2) != INTEGER_CST)
4070 else if (tree_int_cst_lt (len1, len2))
4075 /* If both arguments have side effects, we cannot optimize. */
4076 if (!len || TREE_SIDE_EFFECTS (len))
4079 arg3_rtx = expand_normal (len);
4081 /* Make a place to write the result of the instruction. */
4084 && REG_P (result) && GET_MODE (result) == insn_mode
4085 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4086 result = gen_reg_rtx (insn_mode);
4088 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4089 GEN_INT (MIN (arg1_align, arg2_align)));
4095 enum machine_mode mode;
4098 /* Return the value in the proper mode for this function. */
4099 mode = TYPE_MODE (TREE_TYPE (exp));
4100 if (GET_MODE (result) == mode)
4103 return convert_to_mode (mode, result, 0);
4104 convert_move (target, result, 0);
4108 /* Expand the library call ourselves using a stabilized argument
4109 list to avoid re-evaluating the function's arguments twice. */
4110 #ifdef HAVE_cmpstrnsi
4113 fndecl = get_callee_fndecl (exp);
4114 fn = build_call_nofold (fndecl, 2, arg1, arg2);
4115 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4116 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4117 return expand_call (fn, target, target == const0_rtx);
4123 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4124 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4125 the result in TARGET, if convenient. */
4128 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4129 ATTRIBUTE_UNUSED enum machine_mode mode)
4131 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4133 if (!validate_arglist (exp,
4134 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4137 /* If c_strlen can determine an expression for one of the string
4138 lengths, and it doesn't have side effects, then emit cmpstrnsi
4139 using length MIN(strlen(string)+1, arg3). */
4140 #ifdef HAVE_cmpstrnsi
4143 tree len, len1, len2;
4144 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4147 tree arg1 = CALL_EXPR_ARG (exp, 0);
4148 tree arg2 = CALL_EXPR_ARG (exp, 1);
4149 tree arg3 = CALL_EXPR_ARG (exp, 2);
4152 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4154 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4155 enum machine_mode insn_mode
4156 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4158 len1 = c_strlen (arg1, 1);
4159 len2 = c_strlen (arg2, 1);
4162 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4164 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4166 /* If we don't have a constant length for the first, use the length
4167 of the second, if we know it. We don't require a constant for
4168 this case; some cost analysis could be done if both are available
4169 but neither is constant. For now, assume they're equally cheap,
4170 unless one has side effects. If both strings have constant lengths,
4177 else if (TREE_SIDE_EFFECTS (len1))
4179 else if (TREE_SIDE_EFFECTS (len2))
4181 else if (TREE_CODE (len1) != INTEGER_CST)
4183 else if (TREE_CODE (len2) != INTEGER_CST)
4185 else if (tree_int_cst_lt (len1, len2))
4190 /* If both arguments have side effects, we cannot optimize. */
4191 if (!len || TREE_SIDE_EFFECTS (len))
4194 /* The actual new length parameter is MIN(len,arg3). */
4195 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4196 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4198 /* If we don't have POINTER_TYPE, call the function. */
4199 if (arg1_align == 0 || arg2_align == 0)
4202 /* Make a place to write the result of the instruction. */
4205 && REG_P (result) && GET_MODE (result) == insn_mode
4206 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4207 result = gen_reg_rtx (insn_mode);
4209 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4210 arg1 = builtin_save_expr (arg1);
4211 arg2 = builtin_save_expr (arg2);
4212 len = builtin_save_expr (len);
4214 arg1_rtx = get_memory_rtx (arg1, len);
4215 arg2_rtx = get_memory_rtx (arg2, len);
4216 arg3_rtx = expand_normal (len);
4217 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4218 GEN_INT (MIN (arg1_align, arg2_align)));
4223 /* Return the value in the proper mode for this function. */
4224 mode = TYPE_MODE (TREE_TYPE (exp));
4225 if (GET_MODE (result) == mode)
4228 return convert_to_mode (mode, result, 0);
4229 convert_move (target, result, 0);
4233 /* Expand the library call ourselves using a stabilized argument
4234 list to avoid re-evaluating the function's arguments twice. */
4235 fndecl = get_callee_fndecl (exp);
4236 fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
4237 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4238 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4239 return expand_call (fn, target, target == const0_rtx);
4245 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4246 if that's convenient. */
4249 expand_builtin_saveregs (void)
4253 /* Don't do __builtin_saveregs more than once in a function.
4254 Save the result of the first call and reuse it. */
4255 if (saveregs_value != 0)
4256 return saveregs_value;
4258 /* When this function is called, it means that registers must be
4259 saved on entry to this function. So we migrate the call to the
4260 first insn of this function. */
4264 /* Do whatever the machine needs done in this case. */
4265 val = targetm.calls.expand_builtin_saveregs ();
4270 saveregs_value = val;
4272 /* Put the insns after the NOTE that starts the function. If this
4273 is inside a start_sequence, make the outer-level insn chain current, so
4274 the code is placed at the start of the function. */
4275 push_topmost_sequence ();
4276 emit_insn_after (seq, entry_of_function ());
4277 pop_topmost_sequence ();
4282 /* __builtin_args_info (N) returns word N of the arg space info
4283 for the current function. The number and meanings of words
4284 is controlled by the definition of CUMULATIVE_ARGS. */
4287 expand_builtin_args_info (tree exp)
4289 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4290 int *word_ptr = (int *) &crtl->args.info;
4292 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4294 if (call_expr_nargs (exp) != 0)
4296 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4297 error ("argument of %<__builtin_args_info%> must be constant");
4300 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4302 if (wordnum < 0 || wordnum >= nwords)
4303 error ("argument of %<__builtin_args_info%> out of range");
4305 return GEN_INT (word_ptr[wordnum]);
4309 error ("missing argument in %<__builtin_args_info%>");
4314 /* Expand a call to __builtin_next_arg. */
4317 expand_builtin_next_arg (void)
4319 /* Checking arguments is already done in fold_builtin_next_arg
4320 that must be called before this function. */
4321 return expand_binop (ptr_mode, add_optab,
4322 crtl->args.internal_arg_pointer,
4323 crtl->args.arg_offset_rtx,
4324 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4327 /* Make it easier for the backends by protecting the valist argument
4328 from multiple evaluations. */
4331 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4333 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4335 gcc_assert (vatype != NULL_TREE);
4337 if (TREE_CODE (vatype) == ARRAY_TYPE)
4339 if (TREE_SIDE_EFFECTS (valist))
4340 valist = save_expr (valist);
4342 /* For this case, the backends will be expecting a pointer to
4343 vatype, but it's possible we've actually been given an array
4344 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4346 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4348 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4349 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4358 if (! TREE_SIDE_EFFECTS (valist))
4361 pt = build_pointer_type (vatype);
4362 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4363 TREE_SIDE_EFFECTS (valist) = 1;
4366 if (TREE_SIDE_EFFECTS (valist))
4367 valist = save_expr (valist);
4368 valist = build_fold_indirect_ref_loc (loc, valist);
4374 /* The "standard" definition of va_list is void*. */
4377 std_build_builtin_va_list (void)
4379 return ptr_type_node;
4382 /* The "standard" abi va_list is va_list_type_node. */
4385 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4387 return va_list_type_node;
4390 /* The "standard" type of va_list is va_list_type_node. */
4393 std_canonical_va_list_type (tree type)
4397 if (INDIRECT_REF_P (type))
4398 type = TREE_TYPE (type);
4399 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4400 type = TREE_TYPE (type);
4401 wtype = va_list_type_node;
4403 /* Treat structure va_list types. */
4404 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4405 htype = TREE_TYPE (htype);
4406 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4408 /* If va_list is an array type, the argument may have decayed
4409 to a pointer type, e.g. by being passed to another function.
4410 In that case, unwrap both types so that we can compare the
4411 underlying records. */
4412 if (TREE_CODE (htype) == ARRAY_TYPE
4413 || POINTER_TYPE_P (htype))
4415 wtype = TREE_TYPE (wtype);
4416 htype = TREE_TYPE (htype);
4419 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4420 return va_list_type_node;
4425 /* The "standard" implementation of va_start: just assign `nextarg' to
4429 std_expand_builtin_va_start (tree valist, rtx nextarg)
4431 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4432 convert_move (va_r, nextarg, 0);
4435 /* Expand EXP, a call to __builtin_va_start. */
4438 expand_builtin_va_start (tree exp)
4442 location_t loc = EXPR_LOCATION (exp);
4444 if (call_expr_nargs (exp) < 2)
4446 error_at (loc, "too few arguments to function %<va_start%>");
4450 if (fold_builtin_next_arg (exp, true))
4453 nextarg = expand_builtin_next_arg ();
4454 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4456 if (targetm.expand_builtin_va_start)
4457 targetm.expand_builtin_va_start (valist, nextarg);
4459 std_expand_builtin_va_start (valist, nextarg);
4464 /* The "standard" implementation of va_arg: read the value from the
4465 current (padded) address and increment by the (padded) size. */
4468 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4471 tree addr, t, type_size, rounded_size, valist_tmp;
4472 unsigned HOST_WIDE_INT align, boundary;
4475 #ifdef ARGS_GROW_DOWNWARD
4476 /* All of the alignment and movement below is for args-grow-up machines.
4477 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4478 implement their own specialized gimplify_va_arg_expr routines. */
4482 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4484 type = build_pointer_type (type);
4486 align = PARM_BOUNDARY / BITS_PER_UNIT;
4487 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4489 /* When we align parameter on stack for caller, if the parameter
4490 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4491 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4492 here with caller. */
4493 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4494 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4496 boundary /= BITS_PER_UNIT;
4498 /* Hoist the valist value into a temporary for the moment. */
4499 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4501 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4502 requires greater alignment, we must perform dynamic alignment. */
4503 if (boundary > align
4504 && !integer_zerop (TYPE_SIZE (type)))
4506 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4507 fold_build2 (POINTER_PLUS_EXPR,
4509 valist_tmp, size_int (boundary - 1)));
4510 gimplify_and_add (t, pre_p);
4512 t = fold_convert (sizetype, valist_tmp);
4513 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4514 fold_convert (TREE_TYPE (valist),
4515 fold_build2 (BIT_AND_EXPR, sizetype, t,
4516 size_int (-boundary))));
4517 gimplify_and_add (t, pre_p);
4522 /* If the actual alignment is less than the alignment of the type,
4523 adjust the type accordingly so that we don't assume strict alignment
4524 when dereferencing the pointer. */
4525 boundary *= BITS_PER_UNIT;
4526 if (boundary < TYPE_ALIGN (type))
4528 type = build_variant_type_copy (type);
4529 TYPE_ALIGN (type) = boundary;
4532 /* Compute the rounded size of the type. */
4533 type_size = size_in_bytes (type);
4534 rounded_size = round_up (type_size, align);
4536 /* Reduce rounded_size so it's sharable with the postqueue. */
4537 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4541 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4543 /* Small args are padded downward. */
4544 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4545 rounded_size, size_int (align));
4546 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4547 size_binop (MINUS_EXPR, rounded_size, type_size));
4548 addr = fold_build2 (POINTER_PLUS_EXPR,
4549 TREE_TYPE (addr), addr, t);
4552 /* Compute new value for AP. */
4553 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4554 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4555 gimplify_and_add (t, pre_p);
4557 addr = fold_convert (build_pointer_type (type), addr);
4560 addr = build_va_arg_indirect_ref (addr);
4562 return build_va_arg_indirect_ref (addr);
4565 /* Build an indirect-ref expression over the given TREE, which represents a
4566 piece of a va_arg() expansion. */
4568 build_va_arg_indirect_ref (tree addr)
4570 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4572 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4578 /* Return a dummy expression of type TYPE in order to keep going after an
4582 dummy_object (tree type)
4584 tree t = build_int_cst (build_pointer_type (type), 0);
4585 return build1 (INDIRECT_REF, type, t);
4588 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4589 builtin function, but a very special sort of operator. */
4591 enum gimplify_status
4592 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4594 tree promoted_type, have_va_type;
4595 tree valist = TREE_OPERAND (*expr_p, 0);
4596 tree type = TREE_TYPE (*expr_p);
4598 location_t loc = EXPR_LOCATION (*expr_p);
4600 /* Verify that valist is of the proper type. */
4601 have_va_type = TREE_TYPE (valist);
4602 if (have_va_type == error_mark_node)
4604 have_va_type = targetm.canonical_va_list_type (have_va_type);
4606 if (have_va_type == NULL_TREE)
4608 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4612 /* Generate a diagnostic for requesting data of a type that cannot
4613 be passed through `...' due to type promotion at the call site. */
4614 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4617 static bool gave_help;
4620 /* Unfortunately, this is merely undefined, rather than a constraint
4621 violation, so we cannot make this an error. If this call is never
4622 executed, the program is still strictly conforming. */
4623 warned = warning_at (loc, 0,
4624 "%qT is promoted to %qT when passed through %<...%>",
4625 type, promoted_type);
4626 if (!gave_help && warned)
4629 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4630 promoted_type, type);
4633 /* We can, however, treat "undefined" any way we please.
4634 Call abort to encourage the user to fix the program. */
4636 inform (loc, "if this code is reached, the program will abort");
4637 /* Before the abort, allow the evaluation of the va_list
4638 expression to exit or longjmp. */
4639 gimplify_and_add (valist, pre_p);
4640 t = build_call_expr_loc (loc,
4641 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4642 gimplify_and_add (t, pre_p);
4644 /* This is dead code, but go ahead and finish so that the
4645 mode of the result comes out right. */
4646 *expr_p = dummy_object (type);
4651 /* Make it easier for the backends by protecting the valist argument
4652 from multiple evaluations. */
4653 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4655 /* For this case, the backends will be expecting a pointer to
4656 TREE_TYPE (abi), but it's possible we've
4657 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4659 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4661 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4662 valist = fold_convert_loc (loc, p1,
4663 build_fold_addr_expr_loc (loc, valist));
4666 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4669 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4671 if (!targetm.gimplify_va_arg_expr)
4672 /* FIXME: Once most targets are converted we should merely
4673 assert this is non-null. */
4676 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4681 /* Expand EXP, a call to __builtin_va_end. */
4684 expand_builtin_va_end (tree exp)
4686 tree valist = CALL_EXPR_ARG (exp, 0);
4688 /* Evaluate for side effects, if needed. I hate macros that don't
4690 if (TREE_SIDE_EFFECTS (valist))
4691 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4696 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4697 builtin rather than just as an assignment in stdarg.h because of the
4698 nastiness of array-type va_list types. */
4701 expand_builtin_va_copy (tree exp)
4704 location_t loc = EXPR_LOCATION (exp);
4706 dst = CALL_EXPR_ARG (exp, 0);
4707 src = CALL_EXPR_ARG (exp, 1);
4709 dst = stabilize_va_list_loc (loc, dst, 1);
4710 src = stabilize_va_list_loc (loc, src, 0);
4712 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4714 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4716 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4717 TREE_SIDE_EFFECTS (t) = 1;
4718 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4722 rtx dstb, srcb, size;
4724 /* Evaluate to pointers. */
4725 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4726 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4727 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4728 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4730 dstb = convert_memory_address (Pmode, dstb);
4731 srcb = convert_memory_address (Pmode, srcb);
4733 /* "Dereference" to BLKmode memories. */
4734 dstb = gen_rtx_MEM (BLKmode, dstb);
4735 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4736 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4737 srcb = gen_rtx_MEM (BLKmode, srcb);
4738 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4739 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4742 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4748 /* Expand a call to one of the builtin functions __builtin_frame_address or
4749 __builtin_return_address. */
4752 expand_builtin_frame_address (tree fndecl, tree exp)
4754 /* The argument must be a nonnegative integer constant.
4755 It counts the number of frames to scan up the stack.
4756 The value is the return address saved in that frame. */
4757 if (call_expr_nargs (exp) == 0)
4758 /* Warning about missing arg was already issued. */
4760 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4762 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4763 error ("invalid argument to %<__builtin_frame_address%>");
4765 error ("invalid argument to %<__builtin_return_address%>");
4771 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4772 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4774 /* Some ports cannot access arbitrary stack frames. */
4777 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4778 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4780 warning (0, "unsupported argument to %<__builtin_return_address%>");
4784 /* For __builtin_frame_address, return what we've got. */
4785 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4789 && ! CONSTANT_P (tem))
4790 tem = copy_to_mode_reg (Pmode, tem);
4795 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4796 we failed and the caller should emit a normal call, otherwise try to get
4797 the result in TARGET, if convenient. */
4800 expand_builtin_alloca (tree exp, rtx target)
4805 /* Emit normal call if marked not-inlineable. */
4806 if (CALL_CANNOT_INLINE_P (exp))
4809 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4812 /* Compute the argument. */
4813 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4815 /* Allocate the desired space. */
4816 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4817 result = convert_memory_address (ptr_mode, result);
4822 /* Expand a call to a bswap builtin with argument ARG0. MODE
4823 is the mode to expand with. */
4826 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4828 enum machine_mode mode;
4832 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4835 arg = CALL_EXPR_ARG (exp, 0);
4836 mode = TYPE_MODE (TREE_TYPE (arg));
4837 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4839 target = expand_unop (mode, bswap_optab, op0, target, 1);
4841 gcc_assert (target);
4843 return convert_to_mode (mode, target, 0);
4846 /* Expand a call to a unary builtin in EXP.
4847 Return NULL_RTX if a normal call should be emitted rather than expanding the
4848 function in-line. If convenient, the result should be placed in TARGET.
4849 SUBTARGET may be used as the target for computing one of EXP's operands. */
4852 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4853 rtx subtarget, optab op_optab)
4857 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4860 /* Compute the argument. */
4861 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4862 VOIDmode, EXPAND_NORMAL);
4863 /* Compute op, into TARGET if possible.
4864 Set TARGET to wherever the result comes back. */
4865 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4866 op_optab, op0, target, 1);
4867 gcc_assert (target);
4869 return convert_to_mode (target_mode, target, 0);
4872 /* Expand a call to __builtin_expect. We just return our argument
4873 as the builtin_expect semantic should've been already executed by
4874 tree branch prediction pass. */
4877 expand_builtin_expect (tree exp, rtx target)
4881 if (call_expr_nargs (exp) < 2)
4883 arg = CALL_EXPR_ARG (exp, 0);
4885 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4886 /* When guessing was done, the hints should be already stripped away. */
4887 gcc_assert (!flag_guess_branch_prob
4888 || optimize == 0 || errorcount || sorrycount);
4893 expand_builtin_trap (void)
4897 emit_insn (gen_trap ());
4900 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4904 /* Expand a call to __builtin_unreachable. We do nothing except emit
4905 a barrier saying that control flow will not pass here.
4907 It is the responsibility of the program being compiled to ensure
4908 that control flow does never reach __builtin_unreachable. */
4910 expand_builtin_unreachable (void)
4915 /* Expand EXP, a call to fabs, fabsf or fabsl.
4916 Return NULL_RTX if a normal call should be emitted rather than expanding
4917 the function inline. If convenient, the result should be placed
4918 in TARGET. SUBTARGET may be used as the target for computing
4922 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4924 enum machine_mode mode;
4928 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4931 arg = CALL_EXPR_ARG (exp, 0);
4932 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4933 mode = TYPE_MODE (TREE_TYPE (arg));
4934 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4935 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4938 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4939 Return NULL is a normal call should be emitted rather than expanding the
4940 function inline. If convenient, the result should be placed in TARGET.
4941 SUBTARGET may be used as the target for computing the operand. */
4944 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4949 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4952 arg = CALL_EXPR_ARG (exp, 0);
4953 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4955 arg = CALL_EXPR_ARG (exp, 1);
4956 op1 = expand_normal (arg);
4958 return expand_copysign (op0, op1, target);
4961 /* Create a new constant string literal and return a char* pointer to it.
4962 The STRING_CST value is the LEN characters at STR. */
4964 build_string_literal (int len, const char *str)
4966 tree t, elem, index, type;
4968 t = build_string (len, str);
4969 elem = build_type_variant (char_type_node, 1, 0);
4970 index = build_index_type (size_int (len - 1));
4971 type = build_array_type (elem, index);
4972 TREE_TYPE (t) = type;
4973 TREE_CONSTANT (t) = 1;
4974 TREE_READONLY (t) = 1;
4975 TREE_STATIC (t) = 1;
4977 type = build_pointer_type (elem);
4978 t = build1 (ADDR_EXPR, type,
4979 build4 (ARRAY_REF, elem,
4980 t, integer_zero_node, NULL_TREE, NULL_TREE));
4984 /* Expand a call to either the entry or exit function profiler. */
4987 expand_builtin_profile_func (bool exitp)
4989 rtx this_rtx, which;
4991 this_rtx = DECL_RTL (current_function_decl);
4992 gcc_assert (MEM_P (this_rtx));
4993 this_rtx = XEXP (this_rtx, 0);
4996 which = profile_function_exit_libfunc;
4998 which = profile_function_entry_libfunc;
5000 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5001 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5008 /* Expand a call to __builtin___clear_cache. */
5011 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5013 #ifndef HAVE_clear_cache
5014 #ifdef CLEAR_INSN_CACHE
5015 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5016 does something. Just do the default expansion to a call to
5020 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5021 does nothing. There is no need to call it. Do nothing. */
5023 #endif /* CLEAR_INSN_CACHE */
5025 /* We have a "clear_cache" insn, and it will handle everything. */
5027 rtx begin_rtx, end_rtx;
5028 enum insn_code icode;
5030 /* We must not expand to a library call. If we did, any
5031 fallback library function in libgcc that might contain a call to
5032 __builtin___clear_cache() would recurse infinitely. */
5033 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5035 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5039 if (HAVE_clear_cache)
5041 icode = CODE_FOR_clear_cache;
5043 begin = CALL_EXPR_ARG (exp, 0);
5044 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5045 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5046 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5047 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5049 end = CALL_EXPR_ARG (exp, 1);
5050 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5051 end_rtx = convert_memory_address (Pmode, end_rtx);
5052 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5053 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5055 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5058 #endif /* HAVE_clear_cache */
5061 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5064 round_trampoline_addr (rtx tramp)
5066 rtx temp, addend, mask;
5068 /* If we don't need too much alignment, we'll have been guaranteed
5069 proper alignment by get_trampoline_type. */
5070 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5073 /* Round address up to desired boundary. */
5074 temp = gen_reg_rtx (Pmode);
5075 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5076 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5078 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5079 temp, 0, OPTAB_LIB_WIDEN);
5080 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5081 temp, 0, OPTAB_LIB_WIDEN);
5087 expand_builtin_init_trampoline (tree exp)
5089 tree t_tramp, t_func, t_chain;
5090 rtx m_tramp, r_tramp, r_chain, tmp;
5092 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5093 POINTER_TYPE, VOID_TYPE))
5096 t_tramp = CALL_EXPR_ARG (exp, 0);
5097 t_func = CALL_EXPR_ARG (exp, 1);
5098 t_chain = CALL_EXPR_ARG (exp, 2);
5100 r_tramp = expand_normal (t_tramp);
5101 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5102 MEM_NOTRAP_P (m_tramp) = 1;
5104 /* The TRAMP argument should be the address of a field within the
5105 local function's FRAME decl. Let's see if we can fill in the
5106 to fill in the MEM_ATTRs for this memory. */
5107 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5108 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
5111 tmp = round_trampoline_addr (r_tramp);
5114 m_tramp = change_address (m_tramp, BLKmode, tmp);
5115 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5116 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5119 /* The FUNC argument should be the address of the nested function.
5120 Extract the actual function decl to pass to the hook. */
5121 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5122 t_func = TREE_OPERAND (t_func, 0);
5123 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5125 r_chain = expand_normal (t_chain);
5127 /* Generate insns to initialize the trampoline. */
5128 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5130 trampolines_created = 1;
5135 expand_builtin_adjust_trampoline (tree exp)
5139 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5142 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5143 tramp = round_trampoline_addr (tramp);
5144 if (targetm.calls.trampoline_adjust_address)
5145 tramp = targetm.calls.trampoline_adjust_address (tramp);
5150 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5151 function. The function first checks whether the back end provides
5152 an insn to implement signbit for the respective mode. If not, it
5153 checks whether the floating point format of the value is such that
5154 the sign bit can be extracted. If that is not the case, the
5155 function returns NULL_RTX to indicate that a normal call should be
5156 emitted rather than expanding the function in-line. EXP is the
5157 expression that is a call to the builtin function; if convenient,
5158 the result should be placed in TARGET. */
5160 expand_builtin_signbit (tree exp, rtx target)
5162 const struct real_format *fmt;
5163 enum machine_mode fmode, imode, rmode;
5164 HOST_WIDE_INT hi, lo;
5167 enum insn_code icode;
5169 location_t loc = EXPR_LOCATION (exp);
5171 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5174 arg = CALL_EXPR_ARG (exp, 0);
5175 fmode = TYPE_MODE (TREE_TYPE (arg));
5176 rmode = TYPE_MODE (TREE_TYPE (exp));
5177 fmt = REAL_MODE_FORMAT (fmode);
5179 arg = builtin_save_expr (arg);
5181 /* Expand the argument yielding a RTX expression. */
5182 temp = expand_normal (arg);
5184 /* Check if the back end provides an insn that handles signbit for the
5186 icode = signbit_optab->handlers [(int) fmode].insn_code;
5187 if (icode != CODE_FOR_nothing)
5189 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5190 emit_unop_insn (icode, target, temp, UNKNOWN);
5194 /* For floating point formats without a sign bit, implement signbit
5196 bitpos = fmt->signbit_ro;
5199 /* But we can't do this if the format supports signed zero. */
5200 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5203 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5204 build_real (TREE_TYPE (arg), dconst0));
5205 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5208 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5210 imode = int_mode_for_mode (fmode);
5211 if (imode == BLKmode)
5213 temp = gen_lowpart (imode, temp);
5218 /* Handle targets with different FP word orders. */
5219 if (FLOAT_WORDS_BIG_ENDIAN)
5220 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5222 word = bitpos / BITS_PER_WORD;
5223 temp = operand_subword_force (temp, word, fmode);
5224 bitpos = bitpos % BITS_PER_WORD;
5227 /* Force the intermediate word_mode (or narrower) result into a
5228 register. This avoids attempting to create paradoxical SUBREGs
5229 of floating point modes below. */
5230 temp = force_reg (imode, temp);
5232 /* If the bitpos is within the "result mode" lowpart, the operation
5233 can be implement with a single bitwise AND. Otherwise, we need
5234 a right shift and an AND. */
5236 if (bitpos < GET_MODE_BITSIZE (rmode))
5238 if (bitpos < HOST_BITS_PER_WIDE_INT)
5241 lo = (HOST_WIDE_INT) 1 << bitpos;
5245 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5249 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5250 temp = gen_lowpart (rmode, temp);
5251 temp = expand_binop (rmode, and_optab, temp,
5252 immed_double_const (lo, hi, rmode),
5253 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5257 /* Perform a logical right shift to place the signbit in the least
5258 significant bit, then truncate the result to the desired mode
5259 and mask just this bit. */
5260 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5261 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5262 temp = gen_lowpart (rmode, temp);
5263 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5264 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5270 /* Expand fork or exec calls. TARGET is the desired target of the
5271 call. EXP is the call. FN is the
5272 identificator of the actual function. IGNORE is nonzero if the
5273 value is to be ignored. */
5276 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5281 /* If we are not profiling, just call the function. */
5282 if (!profile_arc_flag)
5285 /* Otherwise call the wrapper. This should be equivalent for the rest of
5286 compiler, so the code does not diverge, and the wrapper may run the
5287 code necessary for keeping the profiling sane. */
5289 switch (DECL_FUNCTION_CODE (fn))
5292 id = get_identifier ("__gcov_fork");
5295 case BUILT_IN_EXECL:
5296 id = get_identifier ("__gcov_execl");
5299 case BUILT_IN_EXECV:
5300 id = get_identifier ("__gcov_execv");
5303 case BUILT_IN_EXECLP:
5304 id = get_identifier ("__gcov_execlp");
5307 case BUILT_IN_EXECLE:
5308 id = get_identifier ("__gcov_execle");
5311 case BUILT_IN_EXECVP:
5312 id = get_identifier ("__gcov_execvp");
5315 case BUILT_IN_EXECVE:
5316 id = get_identifier ("__gcov_execve");
5323 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5324 FUNCTION_DECL, id, TREE_TYPE (fn));
5325 DECL_EXTERNAL (decl) = 1;
5326 TREE_PUBLIC (decl) = 1;
5327 DECL_ARTIFICIAL (decl) = 1;
5328 TREE_NOTHROW (decl) = 1;
5329 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5330 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5331 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5332 return expand_call (call, target, ignore);
5337 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5338 the pointer in these functions is void*, the tree optimizers may remove
5339 casts. The mode computed in expand_builtin isn't reliable either, due
5340 to __sync_bool_compare_and_swap.
5342 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5343 group of builtins. This gives us log2 of the mode size. */
5345 static inline enum machine_mode
5346 get_builtin_sync_mode (int fcode_diff)
5348 /* The size is not negotiable, so ask not to get BLKmode in return
5349 if the target indicates that a smaller size would be better. */
5350 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5353 /* Expand the memory expression LOC and return the appropriate memory operand
5354 for the builtin_sync operations. */
5357 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5361 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5362 addr = convert_memory_address (Pmode, addr);
5364 /* Note that we explicitly do not want any alias information for this
5365 memory, so that we kill all other live memories. Otherwise we don't
5366 satisfy the full barrier semantics of the intrinsic. */
5367 mem = validize_mem (gen_rtx_MEM (mode, addr));
5369 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5370 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5371 MEM_VOLATILE_P (mem) = 1;
5376 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5377 EXP is the CALL_EXPR. CODE is the rtx code
5378 that corresponds to the arithmetic or logical operation from the name;
5379 an exception here is that NOT actually means NAND. TARGET is an optional
5380 place for us to store the results; AFTER is true if this is the
5381 fetch_and_xxx form. IGNORE is true if we don't actually care about
5382 the result of the operation at all. */
5385 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5386 enum rtx_code code, bool after,
5387 rtx target, bool ignore)
5390 enum machine_mode old_mode;
5391 location_t loc = EXPR_LOCATION (exp);
5393 if (code == NOT && warn_sync_nand)
5395 tree fndecl = get_callee_fndecl (exp);
5396 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5398 static bool warned_f_a_n, warned_n_a_f;
5402 case BUILT_IN_FETCH_AND_NAND_1:
5403 case BUILT_IN_FETCH_AND_NAND_2:
5404 case BUILT_IN_FETCH_AND_NAND_4:
5405 case BUILT_IN_FETCH_AND_NAND_8:
5406 case BUILT_IN_FETCH_AND_NAND_16:
5411 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5412 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5413 warned_f_a_n = true;
5416 case BUILT_IN_NAND_AND_FETCH_1:
5417 case BUILT_IN_NAND_AND_FETCH_2:
5418 case BUILT_IN_NAND_AND_FETCH_4:
5419 case BUILT_IN_NAND_AND_FETCH_8:
5420 case BUILT_IN_NAND_AND_FETCH_16:
5425 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5426 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5427 warned_n_a_f = true;
5435 /* Expand the operands. */
5436 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5438 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5439 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5440 of CONST_INTs, where we know the old_mode only from the call argument. */
5441 old_mode = GET_MODE (val);
5442 if (old_mode == VOIDmode)
5443 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5444 val = convert_modes (mode, old_mode, val, 1);
5447 return expand_sync_operation (mem, val, code);
5449 return expand_sync_fetch_operation (mem, val, code, after, target);
5452 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5453 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5454 true if this is the boolean form. TARGET is a place for us to store the
5455 results; this is NOT optional if IS_BOOL is true. */
5458 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5459 bool is_bool, rtx target)
5461 rtx old_val, new_val, mem;
5462 enum machine_mode old_mode;
5464 /* Expand the operands. */
5465 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5468 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5469 mode, EXPAND_NORMAL);
5470 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5471 of CONST_INTs, where we know the old_mode only from the call argument. */
5472 old_mode = GET_MODE (old_val);
5473 if (old_mode == VOIDmode)
5474 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5475 old_val = convert_modes (mode, old_mode, old_val, 1);
5477 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5478 mode, EXPAND_NORMAL);
5479 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5480 of CONST_INTs, where we know the old_mode only from the call argument. */
5481 old_mode = GET_MODE (new_val);
5482 if (old_mode == VOIDmode)
5483 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5484 new_val = convert_modes (mode, old_mode, new_val, 1);
5487 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5489 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5492 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5493 general form is actually an atomic exchange, and some targets only
5494 support a reduced form with the second argument being a constant 1.
5495 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5499 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5503 enum machine_mode old_mode;
5505 /* Expand the operands. */
5506 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5507 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5508 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5509 of CONST_INTs, where we know the old_mode only from the call argument. */
5510 old_mode = GET_MODE (val);
5511 if (old_mode == VOIDmode)
5512 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5513 val = convert_modes (mode, old_mode, val, 1);
5515 return expand_sync_lock_test_and_set (mem, val, target);
5518 /* Expand the __sync_synchronize intrinsic. */
5521 expand_builtin_synchronize (void)
5524 VEC (tree, gc) *v_clobbers;
5526 #ifdef HAVE_memory_barrier
5527 if (HAVE_memory_barrier)
5529 emit_insn (gen_memory_barrier ());
5534 if (synchronize_libfunc != NULL_RTX)
5536 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5540 /* If no explicit memory barrier instruction is available, create an
5541 empty asm stmt with a memory clobber. */
5542 v_clobbers = VEC_alloc (tree, gc, 1);
5543 VEC_quick_push (tree, v_clobbers,
5544 tree_cons (NULL, build_string (6, "memory"), NULL));
5545 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5546 gimple_asm_set_volatile (x, true);
5547 expand_asm_stmt (x);
5550 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5553 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5555 enum insn_code icode;
5557 rtx val = const0_rtx;
5559 /* Expand the operands. */
5560 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5562 /* If there is an explicit operation in the md file, use it. */
5563 icode = sync_lock_release[mode];
5564 if (icode != CODE_FOR_nothing)
5566 if (!insn_data[icode].operand[1].predicate (val, mode))
5567 val = force_reg (mode, val);
5569 insn = GEN_FCN (icode) (mem, val);
5577 /* Otherwise we can implement this operation by emitting a barrier
5578 followed by a store of zero. */
5579 expand_builtin_synchronize ();
5580 emit_move_insn (mem, val);
5583 /* Expand an expression EXP that calls a built-in function,
5584 with result going to TARGET if that's convenient
5585 (and in mode MODE if that's convenient).
5586 SUBTARGET may be used as the target for computing one of EXP's operands.
5587 IGNORE is nonzero if the value is to be ignored. */
5590 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5593 tree fndecl = get_callee_fndecl (exp);
5594 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5595 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5597 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5598 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5600 /* When not optimizing, generate calls to library functions for a certain
5603 && !called_as_built_in (fndecl)
5604 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5605 && fcode != BUILT_IN_ALLOCA
5606 && fcode != BUILT_IN_FREE)
5607 return expand_call (exp, target, ignore);
5609 /* The built-in function expanders test for target == const0_rtx
5610 to determine whether the function's result will be ignored. */
5612 target = const0_rtx;
5614 /* If the result of a pure or const built-in function is ignored, and
5615 none of its arguments are volatile, we can avoid expanding the
5616 built-in call and just evaluate the arguments for side-effects. */
5617 if (target == const0_rtx
5618 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5620 bool volatilep = false;
5622 call_expr_arg_iterator iter;
5624 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5625 if (TREE_THIS_VOLATILE (arg))
5633 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5634 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5641 CASE_FLT_FN (BUILT_IN_FABS):
5642 target = expand_builtin_fabs (exp, target, subtarget);
5647 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5648 target = expand_builtin_copysign (exp, target, subtarget);
5653 /* Just do a normal library call if we were unable to fold
5655 CASE_FLT_FN (BUILT_IN_CABS):
5658 CASE_FLT_FN (BUILT_IN_EXP):
5659 CASE_FLT_FN (BUILT_IN_EXP10):
5660 CASE_FLT_FN (BUILT_IN_POW10):
5661 CASE_FLT_FN (BUILT_IN_EXP2):
5662 CASE_FLT_FN (BUILT_IN_EXPM1):
5663 CASE_FLT_FN (BUILT_IN_LOGB):
5664 CASE_FLT_FN (BUILT_IN_LOG):
5665 CASE_FLT_FN (BUILT_IN_LOG10):
5666 CASE_FLT_FN (BUILT_IN_LOG2):
5667 CASE_FLT_FN (BUILT_IN_LOG1P):
5668 CASE_FLT_FN (BUILT_IN_TAN):
5669 CASE_FLT_FN (BUILT_IN_ASIN):
5670 CASE_FLT_FN (BUILT_IN_ACOS):
5671 CASE_FLT_FN (BUILT_IN_ATAN):
5672 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5673 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5674 because of possible accuracy problems. */
5675 if (! flag_unsafe_math_optimizations)
5677 CASE_FLT_FN (BUILT_IN_SQRT):
5678 CASE_FLT_FN (BUILT_IN_FLOOR):
5679 CASE_FLT_FN (BUILT_IN_CEIL):
5680 CASE_FLT_FN (BUILT_IN_TRUNC):
5681 CASE_FLT_FN (BUILT_IN_ROUND):
5682 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5683 CASE_FLT_FN (BUILT_IN_RINT):
5684 target = expand_builtin_mathfn (exp, target, subtarget);
5689 CASE_FLT_FN (BUILT_IN_ILOGB):
5690 if (! flag_unsafe_math_optimizations)
5692 CASE_FLT_FN (BUILT_IN_ISINF):
5693 CASE_FLT_FN (BUILT_IN_FINITE):
5694 case BUILT_IN_ISFINITE:
5695 case BUILT_IN_ISNORMAL:
5696 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5701 CASE_FLT_FN (BUILT_IN_LCEIL):
5702 CASE_FLT_FN (BUILT_IN_LLCEIL):
5703 CASE_FLT_FN (BUILT_IN_LFLOOR):
5704 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5705 target = expand_builtin_int_roundingfn (exp, target);
5710 CASE_FLT_FN (BUILT_IN_LRINT):
5711 CASE_FLT_FN (BUILT_IN_LLRINT):
5712 CASE_FLT_FN (BUILT_IN_LROUND):
5713 CASE_FLT_FN (BUILT_IN_LLROUND):
5714 target = expand_builtin_int_roundingfn_2 (exp, target);
5719 CASE_FLT_FN (BUILT_IN_POW):
5720 target = expand_builtin_pow (exp, target, subtarget);
5725 CASE_FLT_FN (BUILT_IN_POWI):
5726 target = expand_builtin_powi (exp, target, subtarget);
5731 CASE_FLT_FN (BUILT_IN_ATAN2):
5732 CASE_FLT_FN (BUILT_IN_LDEXP):
5733 CASE_FLT_FN (BUILT_IN_SCALB):
5734 CASE_FLT_FN (BUILT_IN_SCALBN):
5735 CASE_FLT_FN (BUILT_IN_SCALBLN):
5736 if (! flag_unsafe_math_optimizations)
5739 CASE_FLT_FN (BUILT_IN_FMOD):
5740 CASE_FLT_FN (BUILT_IN_REMAINDER):
5741 CASE_FLT_FN (BUILT_IN_DREM):
5742 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5747 CASE_FLT_FN (BUILT_IN_CEXPI):
5748 target = expand_builtin_cexpi (exp, target, subtarget);
5749 gcc_assert (target);
5752 CASE_FLT_FN (BUILT_IN_SIN):
5753 CASE_FLT_FN (BUILT_IN_COS):
5754 if (! flag_unsafe_math_optimizations)
5756 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5761 CASE_FLT_FN (BUILT_IN_SINCOS):
5762 if (! flag_unsafe_math_optimizations)
5764 target = expand_builtin_sincos (exp);
5769 case BUILT_IN_APPLY_ARGS:
5770 return expand_builtin_apply_args ();
5772 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5773 FUNCTION with a copy of the parameters described by
5774 ARGUMENTS, and ARGSIZE. It returns a block of memory
5775 allocated on the stack into which is stored all the registers
5776 that might possibly be used for returning the result of a
5777 function. ARGUMENTS is the value returned by
5778 __builtin_apply_args. ARGSIZE is the number of bytes of
5779 arguments that must be copied. ??? How should this value be
5780 computed? We'll also need a safe worst case value for varargs
5782 case BUILT_IN_APPLY:
5783 if (!validate_arglist (exp, POINTER_TYPE,
5784 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5785 && !validate_arglist (exp, REFERENCE_TYPE,
5786 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5792 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5793 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5794 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5796 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5799 /* __builtin_return (RESULT) causes the function to return the
5800 value described by RESULT. RESULT is address of the block of
5801 memory returned by __builtin_apply. */
5802 case BUILT_IN_RETURN:
5803 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5804 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5807 case BUILT_IN_SAVEREGS:
5808 return expand_builtin_saveregs ();
5810 case BUILT_IN_ARGS_INFO:
5811 return expand_builtin_args_info (exp);
5813 case BUILT_IN_VA_ARG_PACK:
5814 /* All valid uses of __builtin_va_arg_pack () are removed during
5816 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5819 case BUILT_IN_VA_ARG_PACK_LEN:
5820 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5822 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5825 /* Return the address of the first anonymous stack arg. */
5826 case BUILT_IN_NEXT_ARG:
5827 if (fold_builtin_next_arg (exp, false))
5829 return expand_builtin_next_arg ();
5831 case BUILT_IN_CLEAR_CACHE:
5832 target = expand_builtin___clear_cache (exp);
5837 case BUILT_IN_CLASSIFY_TYPE:
5838 return expand_builtin_classify_type (exp);
5840 case BUILT_IN_CONSTANT_P:
5843 case BUILT_IN_FRAME_ADDRESS:
5844 case BUILT_IN_RETURN_ADDRESS:
5845 return expand_builtin_frame_address (fndecl, exp);
5847 /* Returns the address of the area where the structure is returned.
5849 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5850 if (call_expr_nargs (exp) != 0
5851 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5852 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5855 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5857 case BUILT_IN_ALLOCA:
5858 target = expand_builtin_alloca (exp, target);
5863 case BUILT_IN_STACK_SAVE:
5864 return expand_stack_save ();
5866 case BUILT_IN_STACK_RESTORE:
5867 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5870 case BUILT_IN_BSWAP32:
5871 case BUILT_IN_BSWAP64:
5872 target = expand_builtin_bswap (exp, target, subtarget);
5878 CASE_INT_FN (BUILT_IN_FFS):
5879 case BUILT_IN_FFSIMAX:
5880 target = expand_builtin_unop (target_mode, exp, target,
5881 subtarget, ffs_optab);
5886 CASE_INT_FN (BUILT_IN_CLZ):
5887 case BUILT_IN_CLZIMAX:
5888 target = expand_builtin_unop (target_mode, exp, target,
5889 subtarget, clz_optab);
5894 CASE_INT_FN (BUILT_IN_CTZ):
5895 case BUILT_IN_CTZIMAX:
5896 target = expand_builtin_unop (target_mode, exp, target,
5897 subtarget, ctz_optab);
5902 CASE_INT_FN (BUILT_IN_POPCOUNT):
5903 case BUILT_IN_POPCOUNTIMAX:
5904 target = expand_builtin_unop (target_mode, exp, target,
5905 subtarget, popcount_optab);
5910 CASE_INT_FN (BUILT_IN_PARITY):
5911 case BUILT_IN_PARITYIMAX:
5912 target = expand_builtin_unop (target_mode, exp, target,
5913 subtarget, parity_optab);
5918 case BUILT_IN_STRLEN:
5919 target = expand_builtin_strlen (exp, target, target_mode);
5924 case BUILT_IN_STRCPY:
5925 target = expand_builtin_strcpy (exp, target);
5930 case BUILT_IN_STRNCPY:
5931 target = expand_builtin_strncpy (exp, target);
5936 case BUILT_IN_STPCPY:
5937 target = expand_builtin_stpcpy (exp, target, mode);
5942 case BUILT_IN_MEMCPY:
5943 target = expand_builtin_memcpy (exp, target);
5948 case BUILT_IN_MEMPCPY:
5949 target = expand_builtin_mempcpy (exp, target, mode);
5954 case BUILT_IN_MEMSET:
5955 target = expand_builtin_memset (exp, target, mode);
5960 case BUILT_IN_BZERO:
5961 target = expand_builtin_bzero (exp);
5966 case BUILT_IN_STRCMP:
5967 target = expand_builtin_strcmp (exp, target);
5972 case BUILT_IN_STRNCMP:
5973 target = expand_builtin_strncmp (exp, target, mode);
5979 case BUILT_IN_MEMCMP:
5980 target = expand_builtin_memcmp (exp, target, mode);
5985 case BUILT_IN_SETJMP:
5986 /* This should have been lowered to the builtins below. */
5989 case BUILT_IN_SETJMP_SETUP:
5990 /* __builtin_setjmp_setup is passed a pointer to an array of five words
5991 and the receiver label. */
5992 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5994 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5995 VOIDmode, EXPAND_NORMAL);
5996 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
5997 rtx label_r = label_rtx (label);
5999 /* This is copied from the handling of non-local gotos. */
6000 expand_builtin_setjmp_setup (buf_addr, label_r);
6001 nonlocal_goto_handler_labels
6002 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6003 nonlocal_goto_handler_labels);
6004 /* ??? Do not let expand_label treat us as such since we would
6005 not want to be both on the list of non-local labels and on
6006 the list of forced labels. */
6007 FORCED_LABEL (label) = 0;
6012 case BUILT_IN_SETJMP_DISPATCHER:
6013 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6014 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6016 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6017 rtx label_r = label_rtx (label);
6019 /* Remove the dispatcher label from the list of non-local labels
6020 since the receiver labels have been added to it above. */
6021 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6026 case BUILT_IN_SETJMP_RECEIVER:
6027 /* __builtin_setjmp_receiver is passed the receiver label. */
6028 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6030 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6031 rtx label_r = label_rtx (label);
6033 expand_builtin_setjmp_receiver (label_r);
6038 /* __builtin_longjmp is passed a pointer to an array of five words.
6039 It's similar to the C library longjmp function but works with
6040 __builtin_setjmp above. */
6041 case BUILT_IN_LONGJMP:
6042 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6044 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6045 VOIDmode, EXPAND_NORMAL);
6046 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6048 if (value != const1_rtx)
6050 error ("%<__builtin_longjmp%> second argument must be 1");
6054 expand_builtin_longjmp (buf_addr, value);
6059 case BUILT_IN_NONLOCAL_GOTO:
6060 target = expand_builtin_nonlocal_goto (exp);
6065 /* This updates the setjmp buffer that is its argument with the value
6066 of the current stack pointer. */
6067 case BUILT_IN_UPDATE_SETJMP_BUF:
6068 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6071 = expand_normal (CALL_EXPR_ARG (exp, 0));
6073 expand_builtin_update_setjmp_buf (buf_addr);
6079 expand_builtin_trap ();
6082 case BUILT_IN_UNREACHABLE:
6083 expand_builtin_unreachable ();
6086 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6087 case BUILT_IN_SIGNBITD32:
6088 case BUILT_IN_SIGNBITD64:
6089 case BUILT_IN_SIGNBITD128:
6090 target = expand_builtin_signbit (exp, target);
6095 /* Various hooks for the DWARF 2 __throw routine. */
6096 case BUILT_IN_UNWIND_INIT:
6097 expand_builtin_unwind_init ();
6099 case BUILT_IN_DWARF_CFA:
6100 return virtual_cfa_rtx;
6101 #ifdef DWARF2_UNWIND_INFO
6102 case BUILT_IN_DWARF_SP_COLUMN:
6103 return expand_builtin_dwarf_sp_column ();
6104 case BUILT_IN_INIT_DWARF_REG_SIZES:
6105 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6108 case BUILT_IN_FROB_RETURN_ADDR:
6109 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6110 case BUILT_IN_EXTRACT_RETURN_ADDR:
6111 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6112 case BUILT_IN_EH_RETURN:
6113 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6114 CALL_EXPR_ARG (exp, 1));
6116 #ifdef EH_RETURN_DATA_REGNO
6117 case BUILT_IN_EH_RETURN_DATA_REGNO:
6118 return expand_builtin_eh_return_data_regno (exp);
6120 case BUILT_IN_EXTEND_POINTER:
6121 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6122 case BUILT_IN_EH_POINTER:
6123 return expand_builtin_eh_pointer (exp);
6124 case BUILT_IN_EH_FILTER:
6125 return expand_builtin_eh_filter (exp);
6126 case BUILT_IN_EH_COPY_VALUES:
6127 return expand_builtin_eh_copy_values (exp);
6129 case BUILT_IN_VA_START:
6130 return expand_builtin_va_start (exp);
6131 case BUILT_IN_VA_END:
6132 return expand_builtin_va_end (exp);
6133 case BUILT_IN_VA_COPY:
6134 return expand_builtin_va_copy (exp);
6135 case BUILT_IN_EXPECT:
6136 return expand_builtin_expect (exp, target);
6137 case BUILT_IN_PREFETCH:
6138 expand_builtin_prefetch (exp);
6141 case BUILT_IN_PROFILE_FUNC_ENTER:
6142 return expand_builtin_profile_func (false);
6143 case BUILT_IN_PROFILE_FUNC_EXIT:
6144 return expand_builtin_profile_func (true);
6146 case BUILT_IN_INIT_TRAMPOLINE:
6147 return expand_builtin_init_trampoline (exp);
6148 case BUILT_IN_ADJUST_TRAMPOLINE:
6149 return expand_builtin_adjust_trampoline (exp);
6152 case BUILT_IN_EXECL:
6153 case BUILT_IN_EXECV:
6154 case BUILT_IN_EXECLP:
6155 case BUILT_IN_EXECLE:
6156 case BUILT_IN_EXECVP:
6157 case BUILT_IN_EXECVE:
6158 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6163 case BUILT_IN_FETCH_AND_ADD_1:
6164 case BUILT_IN_FETCH_AND_ADD_2:
6165 case BUILT_IN_FETCH_AND_ADD_4:
6166 case BUILT_IN_FETCH_AND_ADD_8:
6167 case BUILT_IN_FETCH_AND_ADD_16:
6168 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6169 target = expand_builtin_sync_operation (mode, exp, PLUS,
6170 false, target, ignore);
6175 case BUILT_IN_FETCH_AND_SUB_1:
6176 case BUILT_IN_FETCH_AND_SUB_2:
6177 case BUILT_IN_FETCH_AND_SUB_4:
6178 case BUILT_IN_FETCH_AND_SUB_8:
6179 case BUILT_IN_FETCH_AND_SUB_16:
6180 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6181 target = expand_builtin_sync_operation (mode, exp, MINUS,
6182 false, target, ignore);
6187 case BUILT_IN_FETCH_AND_OR_1:
6188 case BUILT_IN_FETCH_AND_OR_2:
6189 case BUILT_IN_FETCH_AND_OR_4:
6190 case BUILT_IN_FETCH_AND_OR_8:
6191 case BUILT_IN_FETCH_AND_OR_16:
6192 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6193 target = expand_builtin_sync_operation (mode, exp, IOR,
6194 false, target, ignore);
6199 case BUILT_IN_FETCH_AND_AND_1:
6200 case BUILT_IN_FETCH_AND_AND_2:
6201 case BUILT_IN_FETCH_AND_AND_4:
6202 case BUILT_IN_FETCH_AND_AND_8:
6203 case BUILT_IN_FETCH_AND_AND_16:
6204 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6205 target = expand_builtin_sync_operation (mode, exp, AND,
6206 false, target, ignore);
6211 case BUILT_IN_FETCH_AND_XOR_1:
6212 case BUILT_IN_FETCH_AND_XOR_2:
6213 case BUILT_IN_FETCH_AND_XOR_4:
6214 case BUILT_IN_FETCH_AND_XOR_8:
6215 case BUILT_IN_FETCH_AND_XOR_16:
6216 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6217 target = expand_builtin_sync_operation (mode, exp, XOR,
6218 false, target, ignore);
6223 case BUILT_IN_FETCH_AND_NAND_1:
6224 case BUILT_IN_FETCH_AND_NAND_2:
6225 case BUILT_IN_FETCH_AND_NAND_4:
6226 case BUILT_IN_FETCH_AND_NAND_8:
6227 case BUILT_IN_FETCH_AND_NAND_16:
6228 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6229 target = expand_builtin_sync_operation (mode, exp, NOT,
6230 false, target, ignore);
6235 case BUILT_IN_ADD_AND_FETCH_1:
6236 case BUILT_IN_ADD_AND_FETCH_2:
6237 case BUILT_IN_ADD_AND_FETCH_4:
6238 case BUILT_IN_ADD_AND_FETCH_8:
6239 case BUILT_IN_ADD_AND_FETCH_16:
6240 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6241 target = expand_builtin_sync_operation (mode, exp, PLUS,
6242 true, target, ignore);
6247 case BUILT_IN_SUB_AND_FETCH_1:
6248 case BUILT_IN_SUB_AND_FETCH_2:
6249 case BUILT_IN_SUB_AND_FETCH_4:
6250 case BUILT_IN_SUB_AND_FETCH_8:
6251 case BUILT_IN_SUB_AND_FETCH_16:
6252 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6253 target = expand_builtin_sync_operation (mode, exp, MINUS,
6254 true, target, ignore);
6259 case BUILT_IN_OR_AND_FETCH_1:
6260 case BUILT_IN_OR_AND_FETCH_2:
6261 case BUILT_IN_OR_AND_FETCH_4:
6262 case BUILT_IN_OR_AND_FETCH_8:
6263 case BUILT_IN_OR_AND_FETCH_16:
6264 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6265 target = expand_builtin_sync_operation (mode, exp, IOR,
6266 true, target, ignore);
6271 case BUILT_IN_AND_AND_FETCH_1:
6272 case BUILT_IN_AND_AND_FETCH_2:
6273 case BUILT_IN_AND_AND_FETCH_4:
6274 case BUILT_IN_AND_AND_FETCH_8:
6275 case BUILT_IN_AND_AND_FETCH_16:
6276 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6277 target = expand_builtin_sync_operation (mode, exp, AND,
6278 true, target, ignore);
6283 case BUILT_IN_XOR_AND_FETCH_1:
6284 case BUILT_IN_XOR_AND_FETCH_2:
6285 case BUILT_IN_XOR_AND_FETCH_4:
6286 case BUILT_IN_XOR_AND_FETCH_8:
6287 case BUILT_IN_XOR_AND_FETCH_16:
6288 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6289 target = expand_builtin_sync_operation (mode, exp, XOR,
6290 true, target, ignore);
6295 case BUILT_IN_NAND_AND_FETCH_1:
6296 case BUILT_IN_NAND_AND_FETCH_2:
6297 case BUILT_IN_NAND_AND_FETCH_4:
6298 case BUILT_IN_NAND_AND_FETCH_8:
6299 case BUILT_IN_NAND_AND_FETCH_16:
6300 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6301 target = expand_builtin_sync_operation (mode, exp, NOT,
6302 true, target, ignore);
6307 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6308 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6309 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6310 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6311 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6312 if (mode == VOIDmode)
6313 mode = TYPE_MODE (boolean_type_node);
6314 if (!target || !register_operand (target, mode))
6315 target = gen_reg_rtx (mode);
6317 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6318 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6323 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6324 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6325 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6326 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6327 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6328 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6329 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6334 case BUILT_IN_LOCK_TEST_AND_SET_1:
6335 case BUILT_IN_LOCK_TEST_AND_SET_2:
6336 case BUILT_IN_LOCK_TEST_AND_SET_4:
6337 case BUILT_IN_LOCK_TEST_AND_SET_8:
6338 case BUILT_IN_LOCK_TEST_AND_SET_16:
6339 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6340 target = expand_builtin_lock_test_and_set (mode, exp, target);
6345 case BUILT_IN_LOCK_RELEASE_1:
6346 case BUILT_IN_LOCK_RELEASE_2:
6347 case BUILT_IN_LOCK_RELEASE_4:
6348 case BUILT_IN_LOCK_RELEASE_8:
6349 case BUILT_IN_LOCK_RELEASE_16:
6350 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6351 expand_builtin_lock_release (mode, exp);
6354 case BUILT_IN_SYNCHRONIZE:
6355 expand_builtin_synchronize ();
6358 case BUILT_IN_OBJECT_SIZE:
6359 return expand_builtin_object_size (exp);
6361 case BUILT_IN_MEMCPY_CHK:
6362 case BUILT_IN_MEMPCPY_CHK:
6363 case BUILT_IN_MEMMOVE_CHK:
6364 case BUILT_IN_MEMSET_CHK:
6365 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6370 case BUILT_IN_STRCPY_CHK:
6371 case BUILT_IN_STPCPY_CHK:
6372 case BUILT_IN_STRNCPY_CHK:
6373 case BUILT_IN_STRCAT_CHK:
6374 case BUILT_IN_STRNCAT_CHK:
6375 case BUILT_IN_SNPRINTF_CHK:
6376 case BUILT_IN_VSNPRINTF_CHK:
6377 maybe_emit_chk_warning (exp, fcode);
6380 case BUILT_IN_SPRINTF_CHK:
6381 case BUILT_IN_VSPRINTF_CHK:
6382 maybe_emit_sprintf_chk_warning (exp, fcode);
6386 maybe_emit_free_warning (exp);
6389 default: /* just do library call, if unknown builtin */
6393 /* The switch statement above can drop through to cause the function
6394 to be called normally. */
6395 return expand_call (exp, target, ignore);
6398 /* Determine whether a tree node represents a call to a built-in
6399 function. If the tree T is a call to a built-in function with
6400 the right number of arguments of the appropriate types, return
6401 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6402 Otherwise the return value is END_BUILTINS. */
6404 enum built_in_function
6405 builtin_mathfn_code (const_tree t)
6407 const_tree fndecl, arg, parmlist;
6408 const_tree argtype, parmtype;
6409 const_call_expr_arg_iterator iter;
6411 if (TREE_CODE (t) != CALL_EXPR
6412 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6413 return END_BUILTINS;
6415 fndecl = get_callee_fndecl (t);
6416 if (fndecl == NULL_TREE
6417 || TREE_CODE (fndecl) != FUNCTION_DECL
6418 || ! DECL_BUILT_IN (fndecl)
6419 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6420 return END_BUILTINS;
6422 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6423 init_const_call_expr_arg_iterator (t, &iter);
6424 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6426 /* If a function doesn't take a variable number of arguments,
6427 the last element in the list will have type `void'. */
6428 parmtype = TREE_VALUE (parmlist);
6429 if (VOID_TYPE_P (parmtype))
6431 if (more_const_call_expr_args_p (&iter))
6432 return END_BUILTINS;
6433 return DECL_FUNCTION_CODE (fndecl);
6436 if (! more_const_call_expr_args_p (&iter))
6437 return END_BUILTINS;
6439 arg = next_const_call_expr_arg (&iter);
6440 argtype = TREE_TYPE (arg);
6442 if (SCALAR_FLOAT_TYPE_P (parmtype))
6444 if (! SCALAR_FLOAT_TYPE_P (argtype))
6445 return END_BUILTINS;
6447 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6449 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6450 return END_BUILTINS;
6452 else if (POINTER_TYPE_P (parmtype))
6454 if (! POINTER_TYPE_P (argtype))
6455 return END_BUILTINS;
6457 else if (INTEGRAL_TYPE_P (parmtype))
6459 if (! INTEGRAL_TYPE_P (argtype))
6460 return END_BUILTINS;
6463 return END_BUILTINS;
6466 /* Variable-length argument list. */
6467 return DECL_FUNCTION_CODE (fndecl);
6470 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6471 evaluate to a constant. */
6474 fold_builtin_constant_p (tree arg)
6476 /* We return 1 for a numeric type that's known to be a constant
6477 value at compile-time or for an aggregate type that's a
6478 literal constant. */
6481 /* If we know this is a constant, emit the constant of one. */
6482 if (CONSTANT_CLASS_P (arg)
6483 || (TREE_CODE (arg) == CONSTRUCTOR
6484 && TREE_CONSTANT (arg)))
6485 return integer_one_node;
6486 if (TREE_CODE (arg) == ADDR_EXPR)
6488 tree op = TREE_OPERAND (arg, 0);
6489 if (TREE_CODE (op) == STRING_CST
6490 || (TREE_CODE (op) == ARRAY_REF
6491 && integer_zerop (TREE_OPERAND (op, 1))
6492 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6493 return integer_one_node;
6496 /* If this expression has side effects, show we don't know it to be a
6497 constant. Likewise if it's a pointer or aggregate type since in
6498 those case we only want literals, since those are only optimized
6499 when generating RTL, not later.
6500 And finally, if we are compiling an initializer, not code, we
6501 need to return a definite result now; there's not going to be any
6502 more optimization done. */
6503 if (TREE_SIDE_EFFECTS (arg)
6504 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6505 || POINTER_TYPE_P (TREE_TYPE (arg))
6507 || folding_initializer)
6508 return integer_zero_node;
6513 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6514 return it as a truthvalue. */
6517 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6519 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6521 fn = built_in_decls[BUILT_IN_EXPECT];
6522 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6523 ret_type = TREE_TYPE (TREE_TYPE (fn));
6524 pred_type = TREE_VALUE (arg_types);
6525 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6527 pred = fold_convert_loc (loc, pred_type, pred);
6528 expected = fold_convert_loc (loc, expected_type, expected);
6529 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6531 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6532 build_int_cst (ret_type, 0));
6535 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6536 NULL_TREE if no simplification is possible. */
6539 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6542 enum tree_code code;
6544 /* If this is a builtin_expect within a builtin_expect keep the
6545 inner one. See through a comparison against a constant. It
6546 might have been added to create a thruthvalue. */
6548 if (COMPARISON_CLASS_P (inner)
6549 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6550 inner = TREE_OPERAND (inner, 0);
6552 if (TREE_CODE (inner) == CALL_EXPR
6553 && (fndecl = get_callee_fndecl (inner))
6554 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6555 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6558 /* Distribute the expected value over short-circuiting operators.
6559 See through the cast from truthvalue_type_node to long. */
6561 while (TREE_CODE (inner) == NOP_EXPR
6562 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6563 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6564 inner = TREE_OPERAND (inner, 0);
6566 code = TREE_CODE (inner);
6567 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6569 tree op0 = TREE_OPERAND (inner, 0);
6570 tree op1 = TREE_OPERAND (inner, 1);
6572 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6573 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6574 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6576 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6579 /* If the argument isn't invariant then there's nothing else we can do. */
6580 if (!TREE_CONSTANT (arg0))
6583 /* If we expect that a comparison against the argument will fold to
6584 a constant return the constant. In practice, this means a true
6585 constant or the address of a non-weak symbol. */
6588 if (TREE_CODE (inner) == ADDR_EXPR)
6592 inner = TREE_OPERAND (inner, 0);
6594 while (TREE_CODE (inner) == COMPONENT_REF
6595 || TREE_CODE (inner) == ARRAY_REF);
6596 if ((TREE_CODE (inner) == VAR_DECL
6597 || TREE_CODE (inner) == FUNCTION_DECL)
6598 && DECL_WEAK (inner))
6602 /* Otherwise, ARG0 already has the proper type for the return value. */
6606 /* Fold a call to __builtin_classify_type with argument ARG. */
6609 fold_builtin_classify_type (tree arg)
6612 return build_int_cst (NULL_TREE, no_type_class);
6614 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6617 /* Fold a call to __builtin_strlen with argument ARG. */
6620 fold_builtin_strlen (location_t loc, tree arg)
6622 if (!validate_arg (arg, POINTER_TYPE))
6626 tree len = c_strlen (arg, 0);
6630 /* Convert from the internal "sizetype" type to "size_t". */
6632 len = fold_convert_loc (loc, size_type_node, len);
6640 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6643 fold_builtin_inf (location_t loc, tree type, int warn)
6645 REAL_VALUE_TYPE real;
6647 /* __builtin_inff is intended to be usable to define INFINITY on all
6648 targets. If an infinity is not available, INFINITY expands "to a
6649 positive constant of type float that overflows at translation
6650 time", footnote "In this case, using INFINITY will violate the
6651 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6652 Thus we pedwarn to ensure this constraint violation is
6654 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6655 pedwarn (loc, 0, "target format does not support infinity");
6658 return build_real (type, real);
6661 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6664 fold_builtin_nan (tree arg, tree type, int quiet)
6666 REAL_VALUE_TYPE real;
6669 if (!validate_arg (arg, POINTER_TYPE))
6671 str = c_getstr (arg);
6675 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6678 return build_real (type, real);
6681 /* Return true if the floating point expression T has an integer value.
6682 We also allow +Inf, -Inf and NaN to be considered integer values. */
6685 integer_valued_real_p (tree t)
6687 switch (TREE_CODE (t))
6694 return integer_valued_real_p (TREE_OPERAND (t, 0));
6699 return integer_valued_real_p (TREE_OPERAND (t, 1));
6706 return integer_valued_real_p (TREE_OPERAND (t, 0))
6707 && integer_valued_real_p (TREE_OPERAND (t, 1));
6710 return integer_valued_real_p (TREE_OPERAND (t, 1))
6711 && integer_valued_real_p (TREE_OPERAND (t, 2));
6714 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6718 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6719 if (TREE_CODE (type) == INTEGER_TYPE)
6721 if (TREE_CODE (type) == REAL_TYPE)
6722 return integer_valued_real_p (TREE_OPERAND (t, 0));
6727 switch (builtin_mathfn_code (t))
6729 CASE_FLT_FN (BUILT_IN_CEIL):
6730 CASE_FLT_FN (BUILT_IN_FLOOR):
6731 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6732 CASE_FLT_FN (BUILT_IN_RINT):
6733 CASE_FLT_FN (BUILT_IN_ROUND):
6734 CASE_FLT_FN (BUILT_IN_TRUNC):
6737 CASE_FLT_FN (BUILT_IN_FMIN):
6738 CASE_FLT_FN (BUILT_IN_FMAX):
6739 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6740 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6753 /* FNDECL is assumed to be a builtin where truncation can be propagated
6754 across (for instance floor((double)f) == (double)floorf (f).
6755 Do the transformation for a call with argument ARG. */
6758 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6760 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6762 if (!validate_arg (arg, REAL_TYPE))
6765 /* Integer rounding functions are idempotent. */
6766 if (fcode == builtin_mathfn_code (arg))
6769 /* If argument is already integer valued, and we don't need to worry
6770 about setting errno, there's no need to perform rounding. */
6771 if (! flag_errno_math && integer_valued_real_p (arg))
6776 tree arg0 = strip_float_extensions (arg);
6777 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6778 tree newtype = TREE_TYPE (arg0);
6781 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6782 && (decl = mathfn_built_in (newtype, fcode)))
6783 return fold_convert_loc (loc, ftype,
6784 build_call_expr_loc (loc, decl, 1,
6785 fold_convert_loc (loc,
6792 /* FNDECL is assumed to be builtin which can narrow the FP type of
6793 the argument, for instance lround((double)f) -> lroundf (f).
6794 Do the transformation for a call with argument ARG. */
6797 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6799 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6801 if (!validate_arg (arg, REAL_TYPE))
6804 /* If argument is already integer valued, and we don't need to worry
6805 about setting errno, there's no need to perform rounding. */
6806 if (! flag_errno_math && integer_valued_real_p (arg))
6807 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6808 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6812 tree ftype = TREE_TYPE (arg);
6813 tree arg0 = strip_float_extensions (arg);
6814 tree newtype = TREE_TYPE (arg0);
6817 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6818 && (decl = mathfn_built_in (newtype, fcode)))
6819 return build_call_expr_loc (loc, decl, 1,
6820 fold_convert_loc (loc, newtype, arg0));
6823 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6824 sizeof (long long) == sizeof (long). */
6825 if (TYPE_PRECISION (long_long_integer_type_node)
6826 == TYPE_PRECISION (long_integer_type_node))
6828 tree newfn = NULL_TREE;
6831 CASE_FLT_FN (BUILT_IN_LLCEIL):
6832 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6835 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6836 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6839 CASE_FLT_FN (BUILT_IN_LLROUND):
6840 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6843 CASE_FLT_FN (BUILT_IN_LLRINT):
6844 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6853 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6854 return fold_convert_loc (loc,
6855 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6862 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6863 return type. Return NULL_TREE if no simplification can be made. */
6866 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6870 if (!validate_arg (arg, COMPLEX_TYPE)
6871 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6874 /* Calculate the result when the argument is a constant. */
6875 if (TREE_CODE (arg) == COMPLEX_CST
6876 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6880 if (TREE_CODE (arg) == COMPLEX_EXPR)
6882 tree real = TREE_OPERAND (arg, 0);
6883 tree imag = TREE_OPERAND (arg, 1);
6885 /* If either part is zero, cabs is fabs of the other. */
6886 if (real_zerop (real))
6887 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6888 if (real_zerop (imag))
6889 return fold_build1_loc (loc, ABS_EXPR, type, real);
6891 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6892 if (flag_unsafe_math_optimizations
6893 && operand_equal_p (real, imag, OEP_PURE_SAME))
6895 const REAL_VALUE_TYPE sqrt2_trunc
6896 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6898 return fold_build2_loc (loc, MULT_EXPR, type,
6899 fold_build1_loc (loc, ABS_EXPR, type, real),
6900 build_real (type, sqrt2_trunc));
6904 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
6905 if (TREE_CODE (arg) == NEGATE_EXPR
6906 || TREE_CODE (arg) == CONJ_EXPR)
6907 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6909 /* Don't do this when optimizing for size. */
6910 if (flag_unsafe_math_optimizations
6911 && optimize && optimize_function_for_speed_p (cfun))
6913 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6915 if (sqrtfn != NULL_TREE)
6917 tree rpart, ipart, result;
6919 arg = builtin_save_expr (arg);
6921 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6922 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6924 rpart = builtin_save_expr (rpart);
6925 ipart = builtin_save_expr (ipart);
6927 result = fold_build2_loc (loc, PLUS_EXPR, type,
6928 fold_build2_loc (loc, MULT_EXPR, type,
6930 fold_build2_loc (loc, MULT_EXPR, type,
6933 return build_call_expr_loc (loc, sqrtfn, 1, result);
6940 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
6941 Return NULL_TREE if no simplification can be made. */
6944 fold_builtin_sqrt (location_t loc, tree arg, tree type)
6947 enum built_in_function fcode;
6950 if (!validate_arg (arg, REAL_TYPE))
6953 /* Calculate the result when the argument is a constant. */
6954 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
6957 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
6958 fcode = builtin_mathfn_code (arg);
6959 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6961 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6962 arg = fold_build2_loc (loc, MULT_EXPR, type,
6963 CALL_EXPR_ARG (arg, 0),
6964 build_real (type, dconsthalf));
6965 return build_call_expr_loc (loc, expfn, 1, arg);
6968 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
6969 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6971 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6975 tree arg0 = CALL_EXPR_ARG (arg, 0);
6977 /* The inner root was either sqrt or cbrt. */
6978 /* This was a conditional expression but it triggered a bug
6980 REAL_VALUE_TYPE dconstroot;
6981 if (BUILTIN_SQRT_P (fcode))
6982 dconstroot = dconsthalf;
6984 dconstroot = dconst_third ();
6986 /* Adjust for the outer root. */
6987 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6988 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6989 tree_root = build_real (type, dconstroot);
6990 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6994 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
6995 if (flag_unsafe_math_optimizations
6996 && (fcode == BUILT_IN_POW
6997 || fcode == BUILT_IN_POWF
6998 || fcode == BUILT_IN_POWL))
7000 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7001 tree arg0 = CALL_EXPR_ARG (arg, 0);
7002 tree arg1 = CALL_EXPR_ARG (arg, 1);
7004 if (!tree_expr_nonnegative_p (arg0))
7005 arg0 = build1 (ABS_EXPR, type, arg0);
7006 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7007 build_real (type, dconsthalf));
7008 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7014 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7015 Return NULL_TREE if no simplification can be made. */
7018 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7020 const enum built_in_function fcode = builtin_mathfn_code (arg);
7023 if (!validate_arg (arg, REAL_TYPE))
7026 /* Calculate the result when the argument is a constant. */
7027 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7030 if (flag_unsafe_math_optimizations)
7032 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7033 if (BUILTIN_EXPONENT_P (fcode))
7035 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7036 const REAL_VALUE_TYPE third_trunc =
7037 real_value_truncate (TYPE_MODE (type), dconst_third ());
7038 arg = fold_build2_loc (loc, MULT_EXPR, type,
7039 CALL_EXPR_ARG (arg, 0),
7040 build_real (type, third_trunc));
7041 return build_call_expr_loc (loc, expfn, 1, arg);
7044 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7045 if (BUILTIN_SQRT_P (fcode))
7047 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7051 tree arg0 = CALL_EXPR_ARG (arg, 0);
7053 REAL_VALUE_TYPE dconstroot = dconst_third ();
7055 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7056 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7057 tree_root = build_real (type, dconstroot);
7058 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7062 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7063 if (BUILTIN_CBRT_P (fcode))
7065 tree arg0 = CALL_EXPR_ARG (arg, 0);
7066 if (tree_expr_nonnegative_p (arg0))
7068 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7073 REAL_VALUE_TYPE dconstroot;
7075 real_arithmetic (&dconstroot, MULT_EXPR,
7076 dconst_third_ptr (), dconst_third_ptr ());
7077 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7078 tree_root = build_real (type, dconstroot);
7079 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7084 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7085 if (fcode == BUILT_IN_POW
7086 || fcode == BUILT_IN_POWF
7087 || fcode == BUILT_IN_POWL)
7089 tree arg00 = CALL_EXPR_ARG (arg, 0);
7090 tree arg01 = CALL_EXPR_ARG (arg, 1);
7091 if (tree_expr_nonnegative_p (arg00))
7093 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7094 const REAL_VALUE_TYPE dconstroot
7095 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7096 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7097 build_real (type, dconstroot));
7098 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7105 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7106 TYPE is the type of the return value. Return NULL_TREE if no
7107 simplification can be made. */
7110 fold_builtin_cos (location_t loc,
7111 tree arg, tree type, tree fndecl)
7115 if (!validate_arg (arg, REAL_TYPE))
7118 /* Calculate the result when the argument is a constant. */
7119 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7122 /* Optimize cos(-x) into cos (x). */
7123 if ((narg = fold_strip_sign_ops (arg)))
7124 return build_call_expr_loc (loc, fndecl, 1, narg);
7129 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7130 Return NULL_TREE if no simplification can be made. */
7133 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7135 if (validate_arg (arg, REAL_TYPE))
7139 /* Calculate the result when the argument is a constant. */
7140 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7143 /* Optimize cosh(-x) into cosh (x). */
7144 if ((narg = fold_strip_sign_ops (arg)))
7145 return build_call_expr_loc (loc, fndecl, 1, narg);
7151 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7152 argument ARG. TYPE is the type of the return value. Return
7153 NULL_TREE if no simplification can be made. */
7156 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7159 if (validate_arg (arg, COMPLEX_TYPE)
7160 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7164 /* Calculate the result when the argument is a constant. */
7165 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7168 /* Optimize fn(-x) into fn(x). */
7169 if ((tmp = fold_strip_sign_ops (arg)))
7170 return build_call_expr_loc (loc, fndecl, 1, tmp);
7176 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7177 Return NULL_TREE if no simplification can be made. */
7180 fold_builtin_tan (tree arg, tree type)
7182 enum built_in_function fcode;
7185 if (!validate_arg (arg, REAL_TYPE))
7188 /* Calculate the result when the argument is a constant. */
7189 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7192 /* Optimize tan(atan(x)) = x. */
7193 fcode = builtin_mathfn_code (arg);
7194 if (flag_unsafe_math_optimizations
7195 && (fcode == BUILT_IN_ATAN
7196 || fcode == BUILT_IN_ATANF
7197 || fcode == BUILT_IN_ATANL))
7198 return CALL_EXPR_ARG (arg, 0);
7203 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7204 NULL_TREE if no simplification can be made. */
7207 fold_builtin_sincos (location_t loc,
7208 tree arg0, tree arg1, tree arg2)
7213 if (!validate_arg (arg0, REAL_TYPE)
7214 || !validate_arg (arg1, POINTER_TYPE)
7215 || !validate_arg (arg2, POINTER_TYPE))
7218 type = TREE_TYPE (arg0);
7220 /* Calculate the result when the argument is a constant. */
7221 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7224 /* Canonicalize sincos to cexpi. */
7225 if (!TARGET_C99_FUNCTIONS)
7227 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7231 call = build_call_expr_loc (loc, fn, 1, arg0);
7232 call = builtin_save_expr (call);
7234 return build2 (COMPOUND_EXPR, void_type_node,
7235 build2 (MODIFY_EXPR, void_type_node,
7236 build_fold_indirect_ref_loc (loc, arg1),
7237 build1 (IMAGPART_EXPR, type, call)),
7238 build2 (MODIFY_EXPR, void_type_node,
7239 build_fold_indirect_ref_loc (loc, arg2),
7240 build1 (REALPART_EXPR, type, call)));
7243 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7244 NULL_TREE if no simplification can be made. */
7247 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7250 tree realp, imagp, ifn;
7253 if (!validate_arg (arg0, COMPLEX_TYPE)
7254 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7257 /* Calculate the result when the argument is a constant. */
7258 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7261 rtype = TREE_TYPE (TREE_TYPE (arg0));
7263 /* In case we can figure out the real part of arg0 and it is constant zero
7265 if (!TARGET_C99_FUNCTIONS)
7267 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7271 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7272 && real_zerop (realp))
7274 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7275 return build_call_expr_loc (loc, ifn, 1, narg);
7278 /* In case we can easily decompose real and imaginary parts split cexp
7279 to exp (r) * cexpi (i). */
7280 if (flag_unsafe_math_optimizations
7283 tree rfn, rcall, icall;
7285 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7289 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7293 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7294 icall = builtin_save_expr (icall);
7295 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7296 rcall = builtin_save_expr (rcall);
7297 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7298 fold_build2_loc (loc, MULT_EXPR, rtype,
7300 fold_build1_loc (loc, REALPART_EXPR,
7302 fold_build2_loc (loc, MULT_EXPR, rtype,
7304 fold_build1_loc (loc, IMAGPART_EXPR,
7311 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7312 Return NULL_TREE if no simplification can be made. */
7315 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7317 if (!validate_arg (arg, REAL_TYPE))
7320 /* Optimize trunc of constant value. */
7321 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7323 REAL_VALUE_TYPE r, x;
7324 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7326 x = TREE_REAL_CST (arg);
7327 real_trunc (&r, TYPE_MODE (type), &x);
7328 return build_real (type, r);
7331 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7334 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7335 Return NULL_TREE if no simplification can be made. */
7338 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7340 if (!validate_arg (arg, REAL_TYPE))
7343 /* Optimize floor of constant value. */
7344 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7348 x = TREE_REAL_CST (arg);
7349 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7351 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7354 real_floor (&r, TYPE_MODE (type), &x);
7355 return build_real (type, r);
7359 /* Fold floor (x) where x is nonnegative to trunc (x). */
7360 if (tree_expr_nonnegative_p (arg))
7362 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7364 return build_call_expr_loc (loc, truncfn, 1, arg);
7367 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7370 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7371 Return NULL_TREE if no simplification can be made. */
7374 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7376 if (!validate_arg (arg, REAL_TYPE))
7379 /* Optimize ceil of constant value. */
7380 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7384 x = TREE_REAL_CST (arg);
7385 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7387 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7390 real_ceil (&r, TYPE_MODE (type), &x);
7391 return build_real (type, r);
7395 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7398 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7399 Return NULL_TREE if no simplification can be made. */
7402 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7404 if (!validate_arg (arg, REAL_TYPE))
7407 /* Optimize round of constant value. */
7408 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7412 x = TREE_REAL_CST (arg);
7413 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7415 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7418 real_round (&r, TYPE_MODE (type), &x);
7419 return build_real (type, r);
7423 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7426 /* Fold function call to builtin lround, lroundf or lroundl (or the
7427 corresponding long long versions) and other rounding functions. ARG
7428 is the argument to the call. Return NULL_TREE if no simplification
7432 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7434 if (!validate_arg (arg, REAL_TYPE))
7437 /* Optimize lround of constant value. */
7438 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7440 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7442 if (real_isfinite (&x))
7444 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7445 tree ftype = TREE_TYPE (arg);
7446 unsigned HOST_WIDE_INT lo2;
7447 HOST_WIDE_INT hi, lo;
7450 switch (DECL_FUNCTION_CODE (fndecl))
7452 CASE_FLT_FN (BUILT_IN_LFLOOR):
7453 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7454 real_floor (&r, TYPE_MODE (ftype), &x);
7457 CASE_FLT_FN (BUILT_IN_LCEIL):
7458 CASE_FLT_FN (BUILT_IN_LLCEIL):
7459 real_ceil (&r, TYPE_MODE (ftype), &x);
7462 CASE_FLT_FN (BUILT_IN_LROUND):
7463 CASE_FLT_FN (BUILT_IN_LLROUND):
7464 real_round (&r, TYPE_MODE (ftype), &x);
7471 REAL_VALUE_TO_INT (&lo, &hi, r);
7472 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7473 return build_int_cst_wide (itype, lo2, hi);
7477 switch (DECL_FUNCTION_CODE (fndecl))
7479 CASE_FLT_FN (BUILT_IN_LFLOOR):
7480 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7481 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7482 if (tree_expr_nonnegative_p (arg))
7483 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7484 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7489 return fold_fixed_mathfn (loc, fndecl, arg);
7492 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7493 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7494 the argument to the call. Return NULL_TREE if no simplification can
7498 fold_builtin_bitop (tree fndecl, tree arg)
7500 if (!validate_arg (arg, INTEGER_TYPE))
7503 /* Optimize for constant argument. */
7504 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7506 HOST_WIDE_INT hi, width, result;
7507 unsigned HOST_WIDE_INT lo;
7510 type = TREE_TYPE (arg);
7511 width = TYPE_PRECISION (type);
7512 lo = TREE_INT_CST_LOW (arg);
7514 /* Clear all the bits that are beyond the type's precision. */
7515 if (width > HOST_BITS_PER_WIDE_INT)
7517 hi = TREE_INT_CST_HIGH (arg);
7518 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7519 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7524 if (width < HOST_BITS_PER_WIDE_INT)
7525 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7528 switch (DECL_FUNCTION_CODE (fndecl))
7530 CASE_INT_FN (BUILT_IN_FFS):
7532 result = exact_log2 (lo & -lo) + 1;
7534 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7539 CASE_INT_FN (BUILT_IN_CLZ):
7541 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7543 result = width - floor_log2 (lo) - 1;
7544 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7548 CASE_INT_FN (BUILT_IN_CTZ):
7550 result = exact_log2 (lo & -lo);
7552 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7553 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7557 CASE_INT_FN (BUILT_IN_POPCOUNT):
7560 result++, lo &= lo - 1;
7562 result++, hi &= hi - 1;
7565 CASE_INT_FN (BUILT_IN_PARITY):
7568 result++, lo &= lo - 1;
7570 result++, hi &= hi - 1;
7578 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7584 /* Fold function call to builtin_bswap and the long and long long
7585 variants. Return NULL_TREE if no simplification can be made. */
7587 fold_builtin_bswap (tree fndecl, tree arg)
7589 if (! validate_arg (arg, INTEGER_TYPE))
7592 /* Optimize constant value. */
7593 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7595 HOST_WIDE_INT hi, width, r_hi = 0;
7596 unsigned HOST_WIDE_INT lo, r_lo = 0;
7599 type = TREE_TYPE (arg);
7600 width = TYPE_PRECISION (type);
7601 lo = TREE_INT_CST_LOW (arg);
7602 hi = TREE_INT_CST_HIGH (arg);
7604 switch (DECL_FUNCTION_CODE (fndecl))
7606 case BUILT_IN_BSWAP32:
7607 case BUILT_IN_BSWAP64:
7611 for (s = 0; s < width; s += 8)
7613 int d = width - s - 8;
7614 unsigned HOST_WIDE_INT byte;
7616 if (s < HOST_BITS_PER_WIDE_INT)
7617 byte = (lo >> s) & 0xff;
7619 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7621 if (d < HOST_BITS_PER_WIDE_INT)
7624 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7634 if (width < HOST_BITS_PER_WIDE_INT)
7635 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7637 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7643 /* A subroutine of fold_builtin to fold the various logarithmic
7644 functions. Return NULL_TREE if no simplification can me made.
7645 FUNC is the corresponding MPFR logarithm function. */
7648 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7649 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7651 if (validate_arg (arg, REAL_TYPE))
7653 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7655 const enum built_in_function fcode = builtin_mathfn_code (arg);
7657 /* Calculate the result when the argument is a constant. */
7658 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7661 /* Special case, optimize logN(expN(x)) = x. */
7662 if (flag_unsafe_math_optimizations
7663 && ((func == mpfr_log
7664 && (fcode == BUILT_IN_EXP
7665 || fcode == BUILT_IN_EXPF
7666 || fcode == BUILT_IN_EXPL))
7667 || (func == mpfr_log2
7668 && (fcode == BUILT_IN_EXP2
7669 || fcode == BUILT_IN_EXP2F
7670 || fcode == BUILT_IN_EXP2L))
7671 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7672 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7674 /* Optimize logN(func()) for various exponential functions. We
7675 want to determine the value "x" and the power "exponent" in
7676 order to transform logN(x**exponent) into exponent*logN(x). */
7677 if (flag_unsafe_math_optimizations)
7679 tree exponent = 0, x = 0;
7683 CASE_FLT_FN (BUILT_IN_EXP):
7684 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7685 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7687 exponent = CALL_EXPR_ARG (arg, 0);
7689 CASE_FLT_FN (BUILT_IN_EXP2):
7690 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7691 x = build_real (type, dconst2);
7692 exponent = CALL_EXPR_ARG (arg, 0);
7694 CASE_FLT_FN (BUILT_IN_EXP10):
7695 CASE_FLT_FN (BUILT_IN_POW10):
7696 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7698 REAL_VALUE_TYPE dconst10;
7699 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7700 x = build_real (type, dconst10);
7702 exponent = CALL_EXPR_ARG (arg, 0);
7704 CASE_FLT_FN (BUILT_IN_SQRT):
7705 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7706 x = CALL_EXPR_ARG (arg, 0);
7707 exponent = build_real (type, dconsthalf);
7709 CASE_FLT_FN (BUILT_IN_CBRT):
7710 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7711 x = CALL_EXPR_ARG (arg, 0);
7712 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7715 CASE_FLT_FN (BUILT_IN_POW):
7716 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7717 x = CALL_EXPR_ARG (arg, 0);
7718 exponent = CALL_EXPR_ARG (arg, 1);
7724 /* Now perform the optimization. */
7727 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7728 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7736 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7737 NULL_TREE if no simplification can be made. */
7740 fold_builtin_hypot (location_t loc, tree fndecl,
7741 tree arg0, tree arg1, tree type)
7743 tree res, narg0, narg1;
7745 if (!validate_arg (arg0, REAL_TYPE)
7746 || !validate_arg (arg1, REAL_TYPE))
7749 /* Calculate the result when the argument is a constant. */
7750 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7753 /* If either argument to hypot has a negate or abs, strip that off.
7754 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7755 narg0 = fold_strip_sign_ops (arg0);
7756 narg1 = fold_strip_sign_ops (arg1);
7759 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7760 narg1 ? narg1 : arg1);
7763 /* If either argument is zero, hypot is fabs of the other. */
7764 if (real_zerop (arg0))
7765 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7766 else if (real_zerop (arg1))
7767 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7769 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7770 if (flag_unsafe_math_optimizations
7771 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7773 const REAL_VALUE_TYPE sqrt2_trunc
7774 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7775 return fold_build2_loc (loc, MULT_EXPR, type,
7776 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7777 build_real (type, sqrt2_trunc));
7784 /* Fold a builtin function call to pow, powf, or powl. Return
7785 NULL_TREE if no simplification can be made. */
7787 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7791 if (!validate_arg (arg0, REAL_TYPE)
7792 || !validate_arg (arg1, REAL_TYPE))
7795 /* Calculate the result when the argument is a constant. */
7796 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7799 /* Optimize pow(1.0,y) = 1.0. */
7800 if (real_onep (arg0))
7801 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7803 if (TREE_CODE (arg1) == REAL_CST
7804 && !TREE_OVERFLOW (arg1))
7806 REAL_VALUE_TYPE cint;
7810 c = TREE_REAL_CST (arg1);
7812 /* Optimize pow(x,0.0) = 1.0. */
7813 if (REAL_VALUES_EQUAL (c, dconst0))
7814 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7817 /* Optimize pow(x,1.0) = x. */
7818 if (REAL_VALUES_EQUAL (c, dconst1))
7821 /* Optimize pow(x,-1.0) = 1.0/x. */
7822 if (REAL_VALUES_EQUAL (c, dconstm1))
7823 return fold_build2_loc (loc, RDIV_EXPR, type,
7824 build_real (type, dconst1), arg0);
7826 /* Optimize pow(x,0.5) = sqrt(x). */
7827 if (flag_unsafe_math_optimizations
7828 && REAL_VALUES_EQUAL (c, dconsthalf))
7830 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7832 if (sqrtfn != NULL_TREE)
7833 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7836 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7837 if (flag_unsafe_math_optimizations)
7839 const REAL_VALUE_TYPE dconstroot
7840 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7842 if (REAL_VALUES_EQUAL (c, dconstroot))
7844 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7845 if (cbrtfn != NULL_TREE)
7846 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7850 /* Check for an integer exponent. */
7851 n = real_to_integer (&c);
7852 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7853 if (real_identical (&c, &cint))
7855 /* Attempt to evaluate pow at compile-time, unless this should
7856 raise an exception. */
7857 if (TREE_CODE (arg0) == REAL_CST
7858 && !TREE_OVERFLOW (arg0)
7860 || (!flag_trapping_math && !flag_errno_math)
7861 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7866 x = TREE_REAL_CST (arg0);
7867 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7868 if (flag_unsafe_math_optimizations || !inexact)
7869 return build_real (type, x);
7872 /* Strip sign ops from even integer powers. */
7873 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7875 tree narg0 = fold_strip_sign_ops (arg0);
7877 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
7882 if (flag_unsafe_math_optimizations)
7884 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7886 /* Optimize pow(expN(x),y) = expN(x*y). */
7887 if (BUILTIN_EXPONENT_P (fcode))
7889 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7890 tree arg = CALL_EXPR_ARG (arg0, 0);
7891 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7892 return build_call_expr_loc (loc, expfn, 1, arg);
7895 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7896 if (BUILTIN_SQRT_P (fcode))
7898 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7899 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7900 build_real (type, dconsthalf));
7901 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7904 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7905 if (BUILTIN_CBRT_P (fcode))
7907 tree arg = CALL_EXPR_ARG (arg0, 0);
7908 if (tree_expr_nonnegative_p (arg))
7910 const REAL_VALUE_TYPE dconstroot
7911 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7912 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7913 build_real (type, dconstroot));
7914 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7918 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7919 if (fcode == BUILT_IN_POW
7920 || fcode == BUILT_IN_POWF
7921 || fcode == BUILT_IN_POWL)
7923 tree arg00 = CALL_EXPR_ARG (arg0, 0);
7924 if (tree_expr_nonnegative_p (arg00))
7926 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7927 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7928 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7936 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
7937 Return NULL_TREE if no simplification can be made. */
7939 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7940 tree arg0, tree arg1, tree type)
7942 if (!validate_arg (arg0, REAL_TYPE)
7943 || !validate_arg (arg1, INTEGER_TYPE))
7946 /* Optimize pow(1.0,y) = 1.0. */
7947 if (real_onep (arg0))
7948 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7950 if (host_integerp (arg1, 0))
7952 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7954 /* Evaluate powi at compile-time. */
7955 if (TREE_CODE (arg0) == REAL_CST
7956 && !TREE_OVERFLOW (arg0))
7959 x = TREE_REAL_CST (arg0);
7960 real_powi (&x, TYPE_MODE (type), &x, c);
7961 return build_real (type, x);
7964 /* Optimize pow(x,0) = 1.0. */
7966 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7969 /* Optimize pow(x,1) = x. */
7973 /* Optimize pow(x,-1) = 1.0/x. */
7975 return fold_build2_loc (loc, RDIV_EXPR, type,
7976 build_real (type, dconst1), arg0);
7982 /* A subroutine of fold_builtin to fold the various exponent
7983 functions. Return NULL_TREE if no simplification can be made.
7984 FUNC is the corresponding MPFR exponent function. */
7987 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
7988 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7990 if (validate_arg (arg, REAL_TYPE))
7992 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7995 /* Calculate the result when the argument is a constant. */
7996 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
7999 /* Optimize expN(logN(x)) = x. */
8000 if (flag_unsafe_math_optimizations)
8002 const enum built_in_function fcode = builtin_mathfn_code (arg);
8004 if ((func == mpfr_exp
8005 && (fcode == BUILT_IN_LOG
8006 || fcode == BUILT_IN_LOGF
8007 || fcode == BUILT_IN_LOGL))
8008 || (func == mpfr_exp2
8009 && (fcode == BUILT_IN_LOG2
8010 || fcode == BUILT_IN_LOG2F
8011 || fcode == BUILT_IN_LOG2L))
8012 || (func == mpfr_exp10
8013 && (fcode == BUILT_IN_LOG10
8014 || fcode == BUILT_IN_LOG10F
8015 || fcode == BUILT_IN_LOG10L)))
8016 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8023 /* Return true if VAR is a VAR_DECL or a component thereof. */
8026 var_decl_component_p (tree var)
8029 while (handled_component_p (inner))
8030 inner = TREE_OPERAND (inner, 0);
8031 return SSA_VAR_P (inner);
8034 /* Fold function call to builtin memset. Return
8035 NULL_TREE if no simplification can be made. */
8038 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8039 tree type, bool ignore)
8041 tree var, ret, etype;
8042 unsigned HOST_WIDE_INT length, cval;
8044 if (! validate_arg (dest, POINTER_TYPE)
8045 || ! validate_arg (c, INTEGER_TYPE)
8046 || ! validate_arg (len, INTEGER_TYPE))
8049 if (! host_integerp (len, 1))
8052 /* If the LEN parameter is zero, return DEST. */
8053 if (integer_zerop (len))
8054 return omit_one_operand_loc (loc, type, dest, c);
8056 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8061 if (TREE_CODE (var) != ADDR_EXPR)
8064 var = TREE_OPERAND (var, 0);
8065 if (TREE_THIS_VOLATILE (var))
8068 etype = TREE_TYPE (var);
8069 if (TREE_CODE (etype) == ARRAY_TYPE)
8070 etype = TREE_TYPE (etype);
8072 if (!INTEGRAL_TYPE_P (etype)
8073 && !POINTER_TYPE_P (etype))
8076 if (! var_decl_component_p (var))
8079 length = tree_low_cst (len, 1);
8080 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8081 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8085 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8088 if (integer_zerop (c))
8092 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8095 cval = tree_low_cst (c, 1);
8099 cval |= (cval << 31) << 1;
8102 ret = build_int_cst_type (etype, cval);
8103 var = build_fold_indirect_ref_loc (loc,
8104 fold_convert_loc (loc,
8105 build_pointer_type (etype),
8107 ret = build2 (MODIFY_EXPR, etype, var, ret);
8111 return omit_one_operand_loc (loc, type, dest, ret);
8114 /* Fold function call to builtin memset. Return
8115 NULL_TREE if no simplification can be made. */
8118 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8120 if (! validate_arg (dest, POINTER_TYPE)
8121 || ! validate_arg (size, INTEGER_TYPE))
8127 /* New argument list transforming bzero(ptr x, int y) to
8128 memset(ptr x, int 0, size_t y). This is done this way
8129 so that if it isn't expanded inline, we fallback to
8130 calling bzero instead of memset. */
8132 return fold_builtin_memset (loc, dest, integer_zero_node,
8133 fold_convert_loc (loc, sizetype, size),
8134 void_type_node, ignore);
8137 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8138 NULL_TREE if no simplification can be made.
8139 If ENDP is 0, return DEST (like memcpy).
8140 If ENDP is 1, return DEST+LEN (like mempcpy).
8141 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8142 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8146 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8147 tree len, tree type, bool ignore, int endp)
8149 tree destvar, srcvar, expr;
8151 if (! validate_arg (dest, POINTER_TYPE)
8152 || ! validate_arg (src, POINTER_TYPE)
8153 || ! validate_arg (len, INTEGER_TYPE))
8156 /* If the LEN parameter is zero, return DEST. */
8157 if (integer_zerop (len))
8158 return omit_one_operand_loc (loc, type, dest, src);
8160 /* If SRC and DEST are the same (and not volatile), return
8161 DEST{,+LEN,+LEN-1}. */
8162 if (operand_equal_p (src, dest, 0))
8166 tree srctype, desttype;
8167 int src_align, dest_align;
8171 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8172 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8174 /* Both DEST and SRC must be pointer types.
8175 ??? This is what old code did. Is the testing for pointer types
8178 If either SRC is readonly or length is 1, we can use memcpy. */
8179 if (!dest_align || !src_align)
8181 if (readonly_data_expr (src)
8182 || (host_integerp (len, 1)
8183 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8184 >= tree_low_cst (len, 1))))
8186 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8189 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8192 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8193 srcvar = build_fold_indirect_ref_loc (loc, src);
8194 destvar = build_fold_indirect_ref_loc (loc, dest);
8196 && !TREE_THIS_VOLATILE (srcvar)
8198 && !TREE_THIS_VOLATILE (destvar))
8200 tree src_base, dest_base, fn;
8201 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8202 HOST_WIDE_INT size = -1;
8203 HOST_WIDE_INT maxsize = -1;
8206 if (handled_component_p (src_base))
8207 src_base = get_ref_base_and_extent (src_base, &src_offset,
8209 dest_base = destvar;
8210 if (handled_component_p (dest_base))
8211 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8213 if (host_integerp (len, 1))
8215 maxsize = tree_low_cst (len, 1);
8217 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8220 maxsize *= BITS_PER_UNIT;
8224 if (SSA_VAR_P (src_base)
8225 && SSA_VAR_P (dest_base))
8227 if (operand_equal_p (src_base, dest_base, 0)
8228 && ranges_overlap_p (src_offset, maxsize,
8229 dest_offset, maxsize))
8232 else if (TREE_CODE (src_base) == INDIRECT_REF
8233 && TREE_CODE (dest_base) == INDIRECT_REF)
8235 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8236 TREE_OPERAND (dest_base, 0), 0)
8237 || ranges_overlap_p (src_offset, maxsize,
8238 dest_offset, maxsize))
8244 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8247 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8252 if (!host_integerp (len, 0))
8255 This logic lose for arguments like (type *)malloc (sizeof (type)),
8256 since we strip the casts of up to VOID return value from malloc.
8257 Perhaps we ought to inherit type from non-VOID argument here? */
8260 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8261 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8263 tree tem = TREE_OPERAND (src, 0);
8265 if (tem != TREE_OPERAND (src, 0))
8266 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8268 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8270 tree tem = TREE_OPERAND (dest, 0);
8272 if (tem != TREE_OPERAND (dest, 0))
8273 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8275 srctype = TREE_TYPE (TREE_TYPE (src));
8277 && TREE_CODE (srctype) == ARRAY_TYPE
8278 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8280 srctype = TREE_TYPE (srctype);
8282 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8284 desttype = TREE_TYPE (TREE_TYPE (dest));
8286 && TREE_CODE (desttype) == ARRAY_TYPE
8287 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8289 desttype = TREE_TYPE (desttype);
8291 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8293 if (!srctype || !desttype
8294 || !TYPE_SIZE_UNIT (srctype)
8295 || !TYPE_SIZE_UNIT (desttype)
8296 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8297 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8298 || TYPE_VOLATILE (srctype)
8299 || TYPE_VOLATILE (desttype))
8302 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8303 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8304 if (dest_align < (int) TYPE_ALIGN (desttype)
8305 || src_align < (int) TYPE_ALIGN (srctype))
8309 dest = builtin_save_expr (dest);
8312 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8314 srcvar = build_fold_indirect_ref_loc (loc, src);
8315 if (TREE_THIS_VOLATILE (srcvar))
8317 else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
8319 /* With memcpy, it is possible to bypass aliasing rules, so without
8320 this check i.e. execute/20060930-2.c would be misoptimized,
8321 because it use conflicting alias set to hold argument for the
8322 memcpy call. This check is probably unnecessary with
8323 -fno-strict-aliasing. Similarly for destvar. See also
8325 else if (!var_decl_component_p (srcvar))
8329 destvar = NULL_TREE;
8330 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8332 destvar = build_fold_indirect_ref_loc (loc, dest);
8333 if (TREE_THIS_VOLATILE (destvar))
8335 else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
8336 destvar = NULL_TREE;
8337 else if (!var_decl_component_p (destvar))
8338 destvar = NULL_TREE;
8341 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8344 if (srcvar == NULL_TREE)
8347 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8350 srctype = build_qualified_type (desttype, 0);
8351 if (src_align < (int) TYPE_ALIGN (srctype))
8353 if (AGGREGATE_TYPE_P (srctype)
8354 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8357 srctype = build_variant_type_copy (srctype);
8358 TYPE_ALIGN (srctype) = src_align;
8359 TYPE_USER_ALIGN (srctype) = 1;
8360 TYPE_PACKED (srctype) = 1;
8362 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8363 src = fold_convert_loc (loc, srcptype, src);
8364 srcvar = build_fold_indirect_ref_loc (loc, src);
8366 else if (destvar == NULL_TREE)
8369 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8372 desttype = build_qualified_type (srctype, 0);
8373 if (dest_align < (int) TYPE_ALIGN (desttype))
8375 if (AGGREGATE_TYPE_P (desttype)
8376 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8379 desttype = build_variant_type_copy (desttype);
8380 TYPE_ALIGN (desttype) = dest_align;
8381 TYPE_USER_ALIGN (desttype) = 1;
8382 TYPE_PACKED (desttype) = 1;
8384 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8385 dest = fold_convert_loc (loc, destptype, dest);
8386 destvar = build_fold_indirect_ref_loc (loc, dest);
8389 if (srctype == desttype
8390 || (gimple_in_ssa_p (cfun)
8391 && useless_type_conversion_p (desttype, srctype)))
8393 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8394 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8395 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8396 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8397 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
8399 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8400 TREE_TYPE (destvar), srcvar);
8401 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8407 if (endp == 0 || endp == 3)
8408 return omit_one_operand_loc (loc, type, dest, expr);
8414 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8417 len = fold_convert_loc (loc, sizetype, len);
8418 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8419 dest = fold_convert_loc (loc, type, dest);
8421 dest = omit_one_operand_loc (loc, type, dest, expr);
8425 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8426 If LEN is not NULL, it represents the length of the string to be
8427 copied. Return NULL_TREE if no simplification can be made. */
8430 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8434 if (!validate_arg (dest, POINTER_TYPE)
8435 || !validate_arg (src, POINTER_TYPE))
8438 /* If SRC and DEST are the same (and not volatile), return DEST. */
8439 if (operand_equal_p (src, dest, 0))
8440 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8442 if (optimize_function_for_size_p (cfun))
8445 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8451 len = c_strlen (src, 1);
8452 if (! len || TREE_SIDE_EFFECTS (len))
8456 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8457 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8458 build_call_expr_loc (loc, fn, 3, dest, src, len));
8461 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8462 Return NULL_TREE if no simplification can be made. */
8465 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8467 tree fn, len, lenp1, call, type;
8469 if (!validate_arg (dest, POINTER_TYPE)
8470 || !validate_arg (src, POINTER_TYPE))
8473 len = c_strlen (src, 1);
8475 || TREE_CODE (len) != INTEGER_CST)
8478 if (optimize_function_for_size_p (cfun)
8479 /* If length is zero it's small enough. */
8480 && !integer_zerop (len))
8483 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8487 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8488 /* We use dest twice in building our expression. Save it from
8489 multiple expansions. */
8490 dest = builtin_save_expr (dest);
8491 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8493 type = TREE_TYPE (TREE_TYPE (fndecl));
8494 len = fold_convert_loc (loc, sizetype, len);
8495 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8496 dest = fold_convert_loc (loc, type, dest);
8497 dest = omit_one_operand_loc (loc, type, dest, call);
8501 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8502 If SLEN is not NULL, it represents the length of the source string.
8503 Return NULL_TREE if no simplification can be made. */
8506 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8507 tree src, tree len, tree slen)
8511 if (!validate_arg (dest, POINTER_TYPE)
8512 || !validate_arg (src, POINTER_TYPE)
8513 || !validate_arg (len, INTEGER_TYPE))
8516 /* If the LEN parameter is zero, return DEST. */
8517 if (integer_zerop (len))
8518 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8520 /* We can't compare slen with len as constants below if len is not a
8522 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8526 slen = c_strlen (src, 1);
8528 /* Now, we must be passed a constant src ptr parameter. */
8529 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8532 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8534 /* We do not support simplification of this case, though we do
8535 support it when expanding trees into RTL. */
8536 /* FIXME: generate a call to __builtin_memset. */
8537 if (tree_int_cst_lt (slen, len))
8540 /* OK transform into builtin memcpy. */
8541 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8544 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8545 build_call_expr_loc (loc, fn, 3, dest, src, len));
8548 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8549 arguments to the call, and TYPE is its return type.
8550 Return NULL_TREE if no simplification can be made. */
8553 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8555 if (!validate_arg (arg1, POINTER_TYPE)
8556 || !validate_arg (arg2, INTEGER_TYPE)
8557 || !validate_arg (len, INTEGER_TYPE))
8563 if (TREE_CODE (arg2) != INTEGER_CST
8564 || !host_integerp (len, 1))
8567 p1 = c_getstr (arg1);
8568 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8574 if (target_char_cast (arg2, &c))
8577 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8580 return build_int_cst (TREE_TYPE (arg1), 0);
8582 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8584 return fold_convert_loc (loc, type, tem);
8590 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8591 Return NULL_TREE if no simplification can be made. */
8594 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8596 const char *p1, *p2;
8598 if (!validate_arg (arg1, POINTER_TYPE)
8599 || !validate_arg (arg2, POINTER_TYPE)
8600 || !validate_arg (len, INTEGER_TYPE))
8603 /* If the LEN parameter is zero, return zero. */
8604 if (integer_zerop (len))
8605 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8608 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8609 if (operand_equal_p (arg1, arg2, 0))
8610 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8612 p1 = c_getstr (arg1);
8613 p2 = c_getstr (arg2);
8615 /* If all arguments are constant, and the value of len is not greater
8616 than the lengths of arg1 and arg2, evaluate at compile-time. */
8617 if (host_integerp (len, 1) && p1 && p2
8618 && compare_tree_int (len, strlen (p1) + 1) <= 0
8619 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8621 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8624 return integer_one_node;
8626 return integer_minus_one_node;
8628 return integer_zero_node;
8631 /* If len parameter is one, return an expression corresponding to
8632 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8633 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8635 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8636 tree cst_uchar_ptr_node
8637 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8640 = fold_convert_loc (loc, integer_type_node,
8641 build1 (INDIRECT_REF, cst_uchar_node,
8642 fold_convert_loc (loc,
8646 = fold_convert_loc (loc, integer_type_node,
8647 build1 (INDIRECT_REF, cst_uchar_node,
8648 fold_convert_loc (loc,
8651 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8657 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8658 Return NULL_TREE if no simplification can be made. */
8661 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8663 const char *p1, *p2;
8665 if (!validate_arg (arg1, POINTER_TYPE)
8666 || !validate_arg (arg2, POINTER_TYPE))
8669 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8670 if (operand_equal_p (arg1, arg2, 0))
8671 return integer_zero_node;
8673 p1 = c_getstr (arg1);
8674 p2 = c_getstr (arg2);
8678 const int i = strcmp (p1, p2);
8680 return integer_minus_one_node;
8682 return integer_one_node;
8684 return integer_zero_node;
8687 /* If the second arg is "", return *(const unsigned char*)arg1. */
8688 if (p2 && *p2 == '\0')
8690 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8691 tree cst_uchar_ptr_node
8692 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8694 return fold_convert_loc (loc, integer_type_node,
8695 build1 (INDIRECT_REF, cst_uchar_node,
8696 fold_convert_loc (loc,
8701 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8702 if (p1 && *p1 == '\0')
8704 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8705 tree cst_uchar_ptr_node
8706 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8709 = fold_convert_loc (loc, integer_type_node,
8710 build1 (INDIRECT_REF, cst_uchar_node,
8711 fold_convert_loc (loc,
8714 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8720 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8721 Return NULL_TREE if no simplification can be made. */
8724 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8726 const char *p1, *p2;
8728 if (!validate_arg (arg1, POINTER_TYPE)
8729 || !validate_arg (arg2, POINTER_TYPE)
8730 || !validate_arg (len, INTEGER_TYPE))
8733 /* If the LEN parameter is zero, return zero. */
8734 if (integer_zerop (len))
8735 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8738 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8739 if (operand_equal_p (arg1, arg2, 0))
8740 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8742 p1 = c_getstr (arg1);
8743 p2 = c_getstr (arg2);
8745 if (host_integerp (len, 1) && p1 && p2)
8747 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8749 return integer_one_node;
8751 return integer_minus_one_node;
8753 return integer_zero_node;
8756 /* If the second arg is "", and the length is greater than zero,
8757 return *(const unsigned char*)arg1. */
8758 if (p2 && *p2 == '\0'
8759 && TREE_CODE (len) == INTEGER_CST
8760 && tree_int_cst_sgn (len) == 1)
8762 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8763 tree cst_uchar_ptr_node
8764 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8766 return fold_convert_loc (loc, integer_type_node,
8767 build1 (INDIRECT_REF, cst_uchar_node,
8768 fold_convert_loc (loc,
8773 /* If the first arg is "", and the length is greater than zero,
8774 return -*(const unsigned char*)arg2. */
8775 if (p1 && *p1 == '\0'
8776 && TREE_CODE (len) == INTEGER_CST
8777 && tree_int_cst_sgn (len) == 1)
8779 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8780 tree cst_uchar_ptr_node
8781 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8783 tree temp = fold_convert_loc (loc, integer_type_node,
8784 build1 (INDIRECT_REF, cst_uchar_node,
8785 fold_convert_loc (loc,
8788 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8791 /* If len parameter is one, return an expression corresponding to
8792 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8793 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8795 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8796 tree cst_uchar_ptr_node
8797 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8799 tree ind1 = fold_convert_loc (loc, integer_type_node,
8800 build1 (INDIRECT_REF, cst_uchar_node,
8801 fold_convert_loc (loc,
8804 tree ind2 = fold_convert_loc (loc, integer_type_node,
8805 build1 (INDIRECT_REF, cst_uchar_node,
8806 fold_convert_loc (loc,
8809 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8815 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8816 ARG. Return NULL_TREE if no simplification can be made. */
8819 fold_builtin_signbit (location_t loc, tree arg, tree type)
8823 if (!validate_arg (arg, REAL_TYPE))
8826 /* If ARG is a compile-time constant, determine the result. */
8827 if (TREE_CODE (arg) == REAL_CST
8828 && !TREE_OVERFLOW (arg))
8832 c = TREE_REAL_CST (arg);
8833 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8834 return fold_convert_loc (loc, type, temp);
8837 /* If ARG is non-negative, the result is always zero. */
8838 if (tree_expr_nonnegative_p (arg))
8839 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8841 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8842 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8843 return fold_build2_loc (loc, LT_EXPR, type, arg,
8844 build_real (TREE_TYPE (arg), dconst0));
8849 /* Fold function call to builtin copysign, copysignf or copysignl with
8850 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8854 fold_builtin_copysign (location_t loc, tree fndecl,
8855 tree arg1, tree arg2, tree type)
8859 if (!validate_arg (arg1, REAL_TYPE)
8860 || !validate_arg (arg2, REAL_TYPE))
8863 /* copysign(X,X) is X. */
8864 if (operand_equal_p (arg1, arg2, 0))
8865 return fold_convert_loc (loc, type, arg1);
8867 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8868 if (TREE_CODE (arg1) == REAL_CST
8869 && TREE_CODE (arg2) == REAL_CST
8870 && !TREE_OVERFLOW (arg1)
8871 && !TREE_OVERFLOW (arg2))
8873 REAL_VALUE_TYPE c1, c2;
8875 c1 = TREE_REAL_CST (arg1);
8876 c2 = TREE_REAL_CST (arg2);
8877 /* c1.sign := c2.sign. */
8878 real_copysign (&c1, &c2);
8879 return build_real (type, c1);
8882 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8883 Remember to evaluate Y for side-effects. */
8884 if (tree_expr_nonnegative_p (arg2))
8885 return omit_one_operand_loc (loc, type,
8886 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8889 /* Strip sign changing operations for the first argument. */
8890 tem = fold_strip_sign_ops (arg1);
8892 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8897 /* Fold a call to builtin isascii with argument ARG. */
8900 fold_builtin_isascii (location_t loc, tree arg)
8902 if (!validate_arg (arg, INTEGER_TYPE))
8906 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8907 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8908 build_int_cst (NULL_TREE,
8909 ~ (unsigned HOST_WIDE_INT) 0x7f));
8910 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8911 arg, integer_zero_node);
8915 /* Fold a call to builtin toascii with argument ARG. */
8918 fold_builtin_toascii (location_t loc, tree arg)
8920 if (!validate_arg (arg, INTEGER_TYPE))
8923 /* Transform toascii(c) -> (c & 0x7f). */
8924 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8925 build_int_cst (NULL_TREE, 0x7f));
8928 /* Fold a call to builtin isdigit with argument ARG. */
8931 fold_builtin_isdigit (location_t loc, tree arg)
8933 if (!validate_arg (arg, INTEGER_TYPE))
8937 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8938 /* According to the C standard, isdigit is unaffected by locale.
8939 However, it definitely is affected by the target character set. */
8940 unsigned HOST_WIDE_INT target_digit0
8941 = lang_hooks.to_target_charset ('0');
8943 if (target_digit0 == 0)
8946 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8947 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8948 build_int_cst (unsigned_type_node, target_digit0));
8949 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8950 build_int_cst (unsigned_type_node, 9));
8954 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8957 fold_builtin_fabs (location_t loc, tree arg, tree type)
8959 if (!validate_arg (arg, REAL_TYPE))
8962 arg = fold_convert_loc (loc, type, arg);
8963 if (TREE_CODE (arg) == REAL_CST)
8964 return fold_abs_const (arg, type);
8965 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8968 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8971 fold_builtin_abs (location_t loc, tree arg, tree type)
8973 if (!validate_arg (arg, INTEGER_TYPE))
8976 arg = fold_convert_loc (loc, type, arg);
8977 if (TREE_CODE (arg) == INTEGER_CST)
8978 return fold_abs_const (arg, type);
8979 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8982 /* Fold a call to builtin fmin or fmax. */
8985 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8986 tree type, bool max)
8988 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8990 /* Calculate the result when the argument is a constant. */
8991 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8996 /* If either argument is NaN, return the other one. Avoid the
8997 transformation if we get (and honor) a signalling NaN. Using
8998 omit_one_operand() ensures we create a non-lvalue. */
8999 if (TREE_CODE (arg0) == REAL_CST
9000 && real_isnan (&TREE_REAL_CST (arg0))
9001 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9002 || ! TREE_REAL_CST (arg0).signalling))
9003 return omit_one_operand_loc (loc, type, arg1, arg0);
9004 if (TREE_CODE (arg1) == REAL_CST
9005 && real_isnan (&TREE_REAL_CST (arg1))
9006 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9007 || ! TREE_REAL_CST (arg1).signalling))
9008 return omit_one_operand_loc (loc, type, arg0, arg1);
9010 /* Transform fmin/fmax(x,x) -> x. */
9011 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9012 return omit_one_operand_loc (loc, type, arg0, arg1);
9014 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9015 functions to return the numeric arg if the other one is NaN.
9016 These tree codes don't honor that, so only transform if
9017 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9018 handled, so we don't have to worry about it either. */
9019 if (flag_finite_math_only)
9020 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9021 fold_convert_loc (loc, type, arg0),
9022 fold_convert_loc (loc, type, arg1));
9027 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9030 fold_builtin_carg (location_t loc, tree arg, tree type)
9032 if (validate_arg (arg, COMPLEX_TYPE)
9033 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9035 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9039 tree new_arg = builtin_save_expr (arg);
9040 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9041 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9042 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9049 /* Fold a call to builtin logb/ilogb. */
9052 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9054 if (! validate_arg (arg, REAL_TYPE))
9059 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9061 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9067 /* If arg is Inf or NaN and we're logb, return it. */
9068 if (TREE_CODE (rettype) == REAL_TYPE)
9069 return fold_convert_loc (loc, rettype, arg);
9070 /* Fall through... */
9072 /* Zero may set errno and/or raise an exception for logb, also
9073 for ilogb we don't know FP_ILOGB0. */
9076 /* For normal numbers, proceed iff radix == 2. In GCC,
9077 normalized significands are in the range [0.5, 1.0). We
9078 want the exponent as if they were [1.0, 2.0) so get the
9079 exponent and subtract 1. */
9080 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9081 return fold_convert_loc (loc, rettype,
9082 build_int_cst (NULL_TREE,
9083 REAL_EXP (value)-1));
9091 /* Fold a call to builtin significand, if radix == 2. */
9094 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9096 if (! validate_arg (arg, REAL_TYPE))
9101 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9103 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9110 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9111 return fold_convert_loc (loc, rettype, arg);
9113 /* For normal numbers, proceed iff radix == 2. */
9114 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9116 REAL_VALUE_TYPE result = *value;
9117 /* In GCC, normalized significands are in the range [0.5,
9118 1.0). We want them to be [1.0, 2.0) so set the
9120 SET_REAL_EXP (&result, 1);
9121 return build_real (rettype, result);
9130 /* Fold a call to builtin frexp, we can assume the base is 2. */
9133 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9135 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9140 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9143 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9145 /* Proceed if a valid pointer type was passed in. */
9146 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9148 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9154 /* For +-0, return (*exp = 0, +-0). */
9155 exp = integer_zero_node;
9160 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9161 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9164 /* Since the frexp function always expects base 2, and in
9165 GCC normalized significands are already in the range
9166 [0.5, 1.0), we have exactly what frexp wants. */
9167 REAL_VALUE_TYPE frac_rvt = *value;
9168 SET_REAL_EXP (&frac_rvt, 0);
9169 frac = build_real (rettype, frac_rvt);
9170 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9177 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9178 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9179 TREE_SIDE_EFFECTS (arg1) = 1;
9180 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9186 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9187 then we can assume the base is two. If it's false, then we have to
9188 check the mode of the TYPE parameter in certain cases. */
9191 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9192 tree type, bool ldexp)
9194 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9199 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9200 if (real_zerop (arg0) || integer_zerop (arg1)
9201 || (TREE_CODE (arg0) == REAL_CST
9202 && !real_isfinite (&TREE_REAL_CST (arg0))))
9203 return omit_one_operand_loc (loc, type, arg0, arg1);
9205 /* If both arguments are constant, then try to evaluate it. */
9206 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9207 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9208 && host_integerp (arg1, 0))
9210 /* Bound the maximum adjustment to twice the range of the
9211 mode's valid exponents. Use abs to ensure the range is
9212 positive as a sanity check. */
9213 const long max_exp_adj = 2 *
9214 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9215 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9217 /* Get the user-requested adjustment. */
9218 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9220 /* The requested adjustment must be inside this range. This
9221 is a preliminary cap to avoid things like overflow, we
9222 may still fail to compute the result for other reasons. */
9223 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9225 REAL_VALUE_TYPE initial_result;
9227 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9229 /* Ensure we didn't overflow. */
9230 if (! real_isinf (&initial_result))
9232 const REAL_VALUE_TYPE trunc_result
9233 = real_value_truncate (TYPE_MODE (type), initial_result);
9235 /* Only proceed if the target mode can hold the
9237 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9238 return build_real (type, trunc_result);
9247 /* Fold a call to builtin modf. */
9250 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9252 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9257 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9260 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9262 /* Proceed if a valid pointer type was passed in. */
9263 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9265 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9266 REAL_VALUE_TYPE trunc, frac;
9272 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9273 trunc = frac = *value;
9276 /* For +-Inf, return (*arg1 = arg0, +-0). */
9278 frac.sign = value->sign;
9282 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9283 real_trunc (&trunc, VOIDmode, value);
9284 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9285 /* If the original number was negative and already
9286 integral, then the fractional part is -0.0. */
9287 if (value->sign && frac.cl == rvc_zero)
9288 frac.sign = value->sign;
9292 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9293 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9294 build_real (rettype, trunc));
9295 TREE_SIDE_EFFECTS (arg1) = 1;
9296 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9297 build_real (rettype, frac));
9303 /* Given a location LOC, an interclass builtin function decl FNDECL
9304 and its single argument ARG, return an folded expression computing
9305 the same, or NULL_TREE if we either couldn't or didn't want to fold
9306 (the latter happen if there's an RTL instruction available). */
9309 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9311 enum machine_mode mode;
9313 if (!validate_arg (arg, REAL_TYPE))
9316 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9319 mode = TYPE_MODE (TREE_TYPE (arg));
9321 /* If there is no optab, try generic code. */
9322 switch (DECL_FUNCTION_CODE (fndecl))
9326 CASE_FLT_FN (BUILT_IN_ISINF):
9328 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9329 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9330 tree const type = TREE_TYPE (arg);
9334 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9335 real_from_string (&r, buf);
9336 result = build_call_expr (isgr_fn, 2,
9337 fold_build1_loc (loc, ABS_EXPR, type, arg),
9338 build_real (type, r));
9341 CASE_FLT_FN (BUILT_IN_FINITE):
9342 case BUILT_IN_ISFINITE:
9344 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9345 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9346 tree const type = TREE_TYPE (arg);
9350 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9351 real_from_string (&r, buf);
9352 result = build_call_expr (isle_fn, 2,
9353 fold_build1_loc (loc, ABS_EXPR, type, arg),
9354 build_real (type, r));
9355 /*result = fold_build2_loc (loc, UNGT_EXPR,
9356 TREE_TYPE (TREE_TYPE (fndecl)),
9357 fold_build1_loc (loc, ABS_EXPR, type, arg),
9358 build_real (type, r));
9359 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9360 TREE_TYPE (TREE_TYPE (fndecl)),
9364 case BUILT_IN_ISNORMAL:
9366 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9367 islessequal(fabs(x),DBL_MAX). */
9368 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9369 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9370 tree const type = TREE_TYPE (arg);
9371 REAL_VALUE_TYPE rmax, rmin;
9374 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9375 real_from_string (&rmax, buf);
9376 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9377 real_from_string (&rmin, buf);
9378 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9379 result = build_call_expr (isle_fn, 2, arg,
9380 build_real (type, rmax));
9381 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9382 build_call_expr (isge_fn, 2, arg,
9383 build_real (type, rmin)));
9393 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9394 ARG is the argument for the call. */
9397 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9399 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9402 if (!validate_arg (arg, REAL_TYPE))
9405 switch (builtin_index)
9407 case BUILT_IN_ISINF:
9408 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9409 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9411 if (TREE_CODE (arg) == REAL_CST)
9413 r = TREE_REAL_CST (arg);
9414 if (real_isinf (&r))
9415 return real_compare (GT_EXPR, &r, &dconst0)
9416 ? integer_one_node : integer_minus_one_node;
9418 return integer_zero_node;
9423 case BUILT_IN_ISINF_SIGN:
9425 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9426 /* In a boolean context, GCC will fold the inner COND_EXPR to
9427 1. So e.g. "if (isinf_sign(x))" would be folded to just
9428 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9429 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9430 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9431 tree tmp = NULL_TREE;
9433 arg = builtin_save_expr (arg);
9435 if (signbit_fn && isinf_fn)
9437 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9438 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9440 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9441 signbit_call, integer_zero_node);
9442 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9443 isinf_call, integer_zero_node);
9445 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9446 integer_minus_one_node, integer_one_node);
9447 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9455 case BUILT_IN_ISFINITE:
9456 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9457 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9458 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9460 if (TREE_CODE (arg) == REAL_CST)
9462 r = TREE_REAL_CST (arg);
9463 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9468 case BUILT_IN_ISNAN:
9469 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9470 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9472 if (TREE_CODE (arg) == REAL_CST)
9474 r = TREE_REAL_CST (arg);
9475 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9478 arg = builtin_save_expr (arg);
9479 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9486 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9487 This builtin will generate code to return the appropriate floating
9488 point classification depending on the value of the floating point
9489 number passed in. The possible return values must be supplied as
9490 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9491 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9492 one floating point argument which is "type generic". */
9495 fold_builtin_fpclassify (location_t loc, tree exp)
9497 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9498 arg, type, res, tmp;
9499 enum machine_mode mode;
9503 /* Verify the required arguments in the original call. */
9504 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9505 INTEGER_TYPE, INTEGER_TYPE,
9506 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9509 fp_nan = CALL_EXPR_ARG (exp, 0);
9510 fp_infinite = CALL_EXPR_ARG (exp, 1);
9511 fp_normal = CALL_EXPR_ARG (exp, 2);
9512 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9513 fp_zero = CALL_EXPR_ARG (exp, 4);
9514 arg = CALL_EXPR_ARG (exp, 5);
9515 type = TREE_TYPE (arg);
9516 mode = TYPE_MODE (type);
9517 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9521 (fabs(x) == Inf ? FP_INFINITE :
9522 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9523 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9525 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9526 build_real (type, dconst0));
9527 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9528 tmp, fp_zero, fp_subnormal);
9530 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9531 real_from_string (&r, buf);
9532 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9533 arg, build_real (type, r));
9534 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9536 if (HONOR_INFINITIES (mode))
9539 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9540 build_real (type, r));
9541 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9545 if (HONOR_NANS (mode))
9547 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9548 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9554 /* Fold a call to an unordered comparison function such as
9555 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9556 being called and ARG0 and ARG1 are the arguments for the call.
9557 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9558 the opposite of the desired result. UNORDERED_CODE is used
9559 for modes that can hold NaNs and ORDERED_CODE is used for
9563 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9564 enum tree_code unordered_code,
9565 enum tree_code ordered_code)
9567 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9568 enum tree_code code;
9570 enum tree_code code0, code1;
9571 tree cmp_type = NULL_TREE;
9573 type0 = TREE_TYPE (arg0);
9574 type1 = TREE_TYPE (arg1);
9576 code0 = TREE_CODE (type0);
9577 code1 = TREE_CODE (type1);
9579 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9580 /* Choose the wider of two real types. */
9581 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9583 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9585 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9588 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9589 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9591 if (unordered_code == UNORDERED_EXPR)
9593 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9594 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9595 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9598 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9600 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9601 fold_build2_loc (loc, code, type, arg0, arg1));
9604 /* Fold a call to built-in function FNDECL with 0 arguments.
9605 IGNORE is true if the result of the function call is ignored. This
9606 function returns NULL_TREE if no simplification was possible. */
9609 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9611 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9612 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9615 CASE_FLT_FN (BUILT_IN_INF):
9616 case BUILT_IN_INFD32:
9617 case BUILT_IN_INFD64:
9618 case BUILT_IN_INFD128:
9619 return fold_builtin_inf (loc, type, true);
9621 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9622 return fold_builtin_inf (loc, type, false);
9624 case BUILT_IN_CLASSIFY_TYPE:
9625 return fold_builtin_classify_type (NULL_TREE);
9633 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9634 IGNORE is true if the result of the function call is ignored. This
9635 function returns NULL_TREE if no simplification was possible. */
9638 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9640 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9641 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9645 case BUILT_IN_CONSTANT_P:
9647 tree val = fold_builtin_constant_p (arg0);
9649 /* Gimplification will pull the CALL_EXPR for the builtin out of
9650 an if condition. When not optimizing, we'll not CSE it back.
9651 To avoid link error types of regressions, return false now. */
9652 if (!val && !optimize)
9653 val = integer_zero_node;
9658 case BUILT_IN_CLASSIFY_TYPE:
9659 return fold_builtin_classify_type (arg0);
9661 case BUILT_IN_STRLEN:
9662 return fold_builtin_strlen (loc, arg0);
9664 CASE_FLT_FN (BUILT_IN_FABS):
9665 return fold_builtin_fabs (loc, arg0, type);
9669 case BUILT_IN_LLABS:
9670 case BUILT_IN_IMAXABS:
9671 return fold_builtin_abs (loc, arg0, type);
9673 CASE_FLT_FN (BUILT_IN_CONJ):
9674 if (validate_arg (arg0, COMPLEX_TYPE)
9675 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9676 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9679 CASE_FLT_FN (BUILT_IN_CREAL):
9680 if (validate_arg (arg0, COMPLEX_TYPE)
9681 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9682 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9685 CASE_FLT_FN (BUILT_IN_CIMAG):
9686 if (validate_arg (arg0, COMPLEX_TYPE)
9687 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9688 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9691 CASE_FLT_FN (BUILT_IN_CCOS):
9692 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9694 CASE_FLT_FN (BUILT_IN_CCOSH):
9695 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9697 CASE_FLT_FN (BUILT_IN_CSIN):
9698 if (validate_arg (arg0, COMPLEX_TYPE)
9699 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9700 return do_mpc_arg1 (arg0, type, mpc_sin);
9703 CASE_FLT_FN (BUILT_IN_CSINH):
9704 if (validate_arg (arg0, COMPLEX_TYPE)
9705 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9706 return do_mpc_arg1 (arg0, type, mpc_sinh);
9709 CASE_FLT_FN (BUILT_IN_CTAN):
9710 if (validate_arg (arg0, COMPLEX_TYPE)
9711 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9712 return do_mpc_arg1 (arg0, type, mpc_tan);
9715 CASE_FLT_FN (BUILT_IN_CTANH):
9716 if (validate_arg (arg0, COMPLEX_TYPE)
9717 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9718 return do_mpc_arg1 (arg0, type, mpc_tanh);
9721 CASE_FLT_FN (BUILT_IN_CLOG):
9722 if (validate_arg (arg0, COMPLEX_TYPE)
9723 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9724 return do_mpc_arg1 (arg0, type, mpc_log);
9727 CASE_FLT_FN (BUILT_IN_CSQRT):
9728 if (validate_arg (arg0, COMPLEX_TYPE)
9729 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9730 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9733 CASE_FLT_FN (BUILT_IN_CASIN):
9734 if (validate_arg (arg0, COMPLEX_TYPE)
9735 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9736 return do_mpc_arg1 (arg0, type, mpc_asin);
9739 CASE_FLT_FN (BUILT_IN_CACOS):
9740 if (validate_arg (arg0, COMPLEX_TYPE)
9741 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9742 return do_mpc_arg1 (arg0, type, mpc_acos);
9745 CASE_FLT_FN (BUILT_IN_CATAN):
9746 if (validate_arg (arg0, COMPLEX_TYPE)
9747 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9748 return do_mpc_arg1 (arg0, type, mpc_atan);
9751 CASE_FLT_FN (BUILT_IN_CASINH):
9752 if (validate_arg (arg0, COMPLEX_TYPE)
9753 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9754 return do_mpc_arg1 (arg0, type, mpc_asinh);
9757 CASE_FLT_FN (BUILT_IN_CACOSH):
9758 if (validate_arg (arg0, COMPLEX_TYPE)
9759 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9760 return do_mpc_arg1 (arg0, type, mpc_acosh);
9763 CASE_FLT_FN (BUILT_IN_CATANH):
9764 if (validate_arg (arg0, COMPLEX_TYPE)
9765 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9766 return do_mpc_arg1 (arg0, type, mpc_atanh);
9769 CASE_FLT_FN (BUILT_IN_CABS):
9770 return fold_builtin_cabs (loc, arg0, type, fndecl);
9772 CASE_FLT_FN (BUILT_IN_CARG):
9773 return fold_builtin_carg (loc, arg0, type);
9775 CASE_FLT_FN (BUILT_IN_SQRT):
9776 return fold_builtin_sqrt (loc, arg0, type);
9778 CASE_FLT_FN (BUILT_IN_CBRT):
9779 return fold_builtin_cbrt (loc, arg0, type);
9781 CASE_FLT_FN (BUILT_IN_ASIN):
9782 if (validate_arg (arg0, REAL_TYPE))
9783 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9784 &dconstm1, &dconst1, true);
9787 CASE_FLT_FN (BUILT_IN_ACOS):
9788 if (validate_arg (arg0, REAL_TYPE))
9789 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9790 &dconstm1, &dconst1, true);
9793 CASE_FLT_FN (BUILT_IN_ATAN):
9794 if (validate_arg (arg0, REAL_TYPE))
9795 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9798 CASE_FLT_FN (BUILT_IN_ASINH):
9799 if (validate_arg (arg0, REAL_TYPE))
9800 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9803 CASE_FLT_FN (BUILT_IN_ACOSH):
9804 if (validate_arg (arg0, REAL_TYPE))
9805 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9806 &dconst1, NULL, true);
9809 CASE_FLT_FN (BUILT_IN_ATANH):
9810 if (validate_arg (arg0, REAL_TYPE))
9811 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9812 &dconstm1, &dconst1, false);
9815 CASE_FLT_FN (BUILT_IN_SIN):
9816 if (validate_arg (arg0, REAL_TYPE))
9817 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9820 CASE_FLT_FN (BUILT_IN_COS):
9821 return fold_builtin_cos (loc, arg0, type, fndecl);
9823 CASE_FLT_FN (BUILT_IN_TAN):
9824 return fold_builtin_tan (arg0, type);
9826 CASE_FLT_FN (BUILT_IN_CEXP):
9827 return fold_builtin_cexp (loc, arg0, type);
9829 CASE_FLT_FN (BUILT_IN_CEXPI):
9830 if (validate_arg (arg0, REAL_TYPE))
9831 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9834 CASE_FLT_FN (BUILT_IN_SINH):
9835 if (validate_arg (arg0, REAL_TYPE))
9836 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9839 CASE_FLT_FN (BUILT_IN_COSH):
9840 return fold_builtin_cosh (loc, arg0, type, fndecl);
9842 CASE_FLT_FN (BUILT_IN_TANH):
9843 if (validate_arg (arg0, REAL_TYPE))
9844 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9847 CASE_FLT_FN (BUILT_IN_ERF):
9848 if (validate_arg (arg0, REAL_TYPE))
9849 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9852 CASE_FLT_FN (BUILT_IN_ERFC):
9853 if (validate_arg (arg0, REAL_TYPE))
9854 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9857 CASE_FLT_FN (BUILT_IN_TGAMMA):
9858 if (validate_arg (arg0, REAL_TYPE))
9859 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9862 CASE_FLT_FN (BUILT_IN_EXP):
9863 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9865 CASE_FLT_FN (BUILT_IN_EXP2):
9866 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9868 CASE_FLT_FN (BUILT_IN_EXP10):
9869 CASE_FLT_FN (BUILT_IN_POW10):
9870 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9872 CASE_FLT_FN (BUILT_IN_EXPM1):
9873 if (validate_arg (arg0, REAL_TYPE))
9874 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9877 CASE_FLT_FN (BUILT_IN_LOG):
9878 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9880 CASE_FLT_FN (BUILT_IN_LOG2):
9881 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9883 CASE_FLT_FN (BUILT_IN_LOG10):
9884 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9886 CASE_FLT_FN (BUILT_IN_LOG1P):
9887 if (validate_arg (arg0, REAL_TYPE))
9888 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9889 &dconstm1, NULL, false);
9892 CASE_FLT_FN (BUILT_IN_J0):
9893 if (validate_arg (arg0, REAL_TYPE))
9894 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9898 CASE_FLT_FN (BUILT_IN_J1):
9899 if (validate_arg (arg0, REAL_TYPE))
9900 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9904 CASE_FLT_FN (BUILT_IN_Y0):
9905 if (validate_arg (arg0, REAL_TYPE))
9906 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9907 &dconst0, NULL, false);
9910 CASE_FLT_FN (BUILT_IN_Y1):
9911 if (validate_arg (arg0, REAL_TYPE))
9912 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9913 &dconst0, NULL, false);
9916 CASE_FLT_FN (BUILT_IN_NAN):
9917 case BUILT_IN_NAND32:
9918 case BUILT_IN_NAND64:
9919 case BUILT_IN_NAND128:
9920 return fold_builtin_nan (arg0, type, true);
9922 CASE_FLT_FN (BUILT_IN_NANS):
9923 return fold_builtin_nan (arg0, type, false);
9925 CASE_FLT_FN (BUILT_IN_FLOOR):
9926 return fold_builtin_floor (loc, fndecl, arg0);
9928 CASE_FLT_FN (BUILT_IN_CEIL):
9929 return fold_builtin_ceil (loc, fndecl, arg0);
9931 CASE_FLT_FN (BUILT_IN_TRUNC):
9932 return fold_builtin_trunc (loc, fndecl, arg0);
9934 CASE_FLT_FN (BUILT_IN_ROUND):
9935 return fold_builtin_round (loc, fndecl, arg0);
9937 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9938 CASE_FLT_FN (BUILT_IN_RINT):
9939 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9941 CASE_FLT_FN (BUILT_IN_LCEIL):
9942 CASE_FLT_FN (BUILT_IN_LLCEIL):
9943 CASE_FLT_FN (BUILT_IN_LFLOOR):
9944 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9945 CASE_FLT_FN (BUILT_IN_LROUND):
9946 CASE_FLT_FN (BUILT_IN_LLROUND):
9947 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9949 CASE_FLT_FN (BUILT_IN_LRINT):
9950 CASE_FLT_FN (BUILT_IN_LLRINT):
9951 return fold_fixed_mathfn (loc, fndecl, arg0);
9953 case BUILT_IN_BSWAP32:
9954 case BUILT_IN_BSWAP64:
9955 return fold_builtin_bswap (fndecl, arg0);
9957 CASE_INT_FN (BUILT_IN_FFS):
9958 CASE_INT_FN (BUILT_IN_CLZ):
9959 CASE_INT_FN (BUILT_IN_CTZ):
9960 CASE_INT_FN (BUILT_IN_POPCOUNT):
9961 CASE_INT_FN (BUILT_IN_PARITY):
9962 return fold_builtin_bitop (fndecl, arg0);
9964 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9965 return fold_builtin_signbit (loc, arg0, type);
9967 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9968 return fold_builtin_significand (loc, arg0, type);
9970 CASE_FLT_FN (BUILT_IN_ILOGB):
9971 CASE_FLT_FN (BUILT_IN_LOGB):
9972 return fold_builtin_logb (loc, arg0, type);
9974 case BUILT_IN_ISASCII:
9975 return fold_builtin_isascii (loc, arg0);
9977 case BUILT_IN_TOASCII:
9978 return fold_builtin_toascii (loc, arg0);
9980 case BUILT_IN_ISDIGIT:
9981 return fold_builtin_isdigit (loc, arg0);
9983 CASE_FLT_FN (BUILT_IN_FINITE):
9984 case BUILT_IN_FINITED32:
9985 case BUILT_IN_FINITED64:
9986 case BUILT_IN_FINITED128:
9987 case BUILT_IN_ISFINITE:
9989 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9992 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9995 CASE_FLT_FN (BUILT_IN_ISINF):
9996 case BUILT_IN_ISINFD32:
9997 case BUILT_IN_ISINFD64:
9998 case BUILT_IN_ISINFD128:
10000 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10003 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10006 case BUILT_IN_ISNORMAL:
10007 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10009 case BUILT_IN_ISINF_SIGN:
10010 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10012 CASE_FLT_FN (BUILT_IN_ISNAN):
10013 case BUILT_IN_ISNAND32:
10014 case BUILT_IN_ISNAND64:
10015 case BUILT_IN_ISNAND128:
10016 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10018 case BUILT_IN_PRINTF:
10019 case BUILT_IN_PRINTF_UNLOCKED:
10020 case BUILT_IN_VPRINTF:
10021 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10031 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10032 IGNORE is true if the result of the function call is ignored. This
10033 function returns NULL_TREE if no simplification was possible. */
10036 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10038 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10039 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10043 CASE_FLT_FN (BUILT_IN_JN):
10044 if (validate_arg (arg0, INTEGER_TYPE)
10045 && validate_arg (arg1, REAL_TYPE))
10046 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10049 CASE_FLT_FN (BUILT_IN_YN):
10050 if (validate_arg (arg0, INTEGER_TYPE)
10051 && validate_arg (arg1, REAL_TYPE))
10052 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10056 CASE_FLT_FN (BUILT_IN_DREM):
10057 CASE_FLT_FN (BUILT_IN_REMAINDER):
10058 if (validate_arg (arg0, REAL_TYPE)
10059 && validate_arg(arg1, REAL_TYPE))
10060 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10063 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10064 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10065 if (validate_arg (arg0, REAL_TYPE)
10066 && validate_arg(arg1, POINTER_TYPE))
10067 return do_mpfr_lgamma_r (arg0, arg1, type);
10070 CASE_FLT_FN (BUILT_IN_ATAN2):
10071 if (validate_arg (arg0, REAL_TYPE)
10072 && validate_arg(arg1, REAL_TYPE))
10073 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10076 CASE_FLT_FN (BUILT_IN_FDIM):
10077 if (validate_arg (arg0, REAL_TYPE)
10078 && validate_arg(arg1, REAL_TYPE))
10079 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10082 CASE_FLT_FN (BUILT_IN_HYPOT):
10083 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10085 CASE_FLT_FN (BUILT_IN_CPOW):
10086 if (validate_arg (arg0, COMPLEX_TYPE)
10087 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10088 && validate_arg (arg1, COMPLEX_TYPE)
10089 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10090 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10093 CASE_FLT_FN (BUILT_IN_LDEXP):
10094 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10095 CASE_FLT_FN (BUILT_IN_SCALBN):
10096 CASE_FLT_FN (BUILT_IN_SCALBLN):
10097 return fold_builtin_load_exponent (loc, arg0, arg1,
10098 type, /*ldexp=*/false);
10100 CASE_FLT_FN (BUILT_IN_FREXP):
10101 return fold_builtin_frexp (loc, arg0, arg1, type);
10103 CASE_FLT_FN (BUILT_IN_MODF):
10104 return fold_builtin_modf (loc, arg0, arg1, type);
10106 case BUILT_IN_BZERO:
10107 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10109 case BUILT_IN_FPUTS:
10110 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10112 case BUILT_IN_FPUTS_UNLOCKED:
10113 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10115 case BUILT_IN_STRSTR:
10116 return fold_builtin_strstr (loc, arg0, arg1, type);
10118 case BUILT_IN_STRCAT:
10119 return fold_builtin_strcat (loc, arg0, arg1);
10121 case BUILT_IN_STRSPN:
10122 return fold_builtin_strspn (loc, arg0, arg1);
10124 case BUILT_IN_STRCSPN:
10125 return fold_builtin_strcspn (loc, arg0, arg1);
10127 case BUILT_IN_STRCHR:
10128 case BUILT_IN_INDEX:
10129 return fold_builtin_strchr (loc, arg0, arg1, type);
10131 case BUILT_IN_STRRCHR:
10132 case BUILT_IN_RINDEX:
10133 return fold_builtin_strrchr (loc, arg0, arg1, type);
10135 case BUILT_IN_STRCPY:
10136 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10138 case BUILT_IN_STPCPY:
10141 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10145 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10148 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10151 case BUILT_IN_STRCMP:
10152 return fold_builtin_strcmp (loc, arg0, arg1);
10154 case BUILT_IN_STRPBRK:
10155 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10157 case BUILT_IN_EXPECT:
10158 return fold_builtin_expect (loc, arg0, arg1);
10160 CASE_FLT_FN (BUILT_IN_POW):
10161 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10163 CASE_FLT_FN (BUILT_IN_POWI):
10164 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10166 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10167 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10169 CASE_FLT_FN (BUILT_IN_FMIN):
10170 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10172 CASE_FLT_FN (BUILT_IN_FMAX):
10173 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10175 case BUILT_IN_ISGREATER:
10176 return fold_builtin_unordered_cmp (loc, fndecl,
10177 arg0, arg1, UNLE_EXPR, LE_EXPR);
10178 case BUILT_IN_ISGREATEREQUAL:
10179 return fold_builtin_unordered_cmp (loc, fndecl,
10180 arg0, arg1, UNLT_EXPR, LT_EXPR);
10181 case BUILT_IN_ISLESS:
10182 return fold_builtin_unordered_cmp (loc, fndecl,
10183 arg0, arg1, UNGE_EXPR, GE_EXPR);
10184 case BUILT_IN_ISLESSEQUAL:
10185 return fold_builtin_unordered_cmp (loc, fndecl,
10186 arg0, arg1, UNGT_EXPR, GT_EXPR);
10187 case BUILT_IN_ISLESSGREATER:
10188 return fold_builtin_unordered_cmp (loc, fndecl,
10189 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10190 case BUILT_IN_ISUNORDERED:
10191 return fold_builtin_unordered_cmp (loc, fndecl,
10192 arg0, arg1, UNORDERED_EXPR,
10195 /* We do the folding for va_start in the expander. */
10196 case BUILT_IN_VA_START:
10199 case BUILT_IN_SPRINTF:
10200 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10202 case BUILT_IN_OBJECT_SIZE:
10203 return fold_builtin_object_size (arg0, arg1);
10205 case BUILT_IN_PRINTF:
10206 case BUILT_IN_PRINTF_UNLOCKED:
10207 case BUILT_IN_VPRINTF:
10208 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10210 case BUILT_IN_PRINTF_CHK:
10211 case BUILT_IN_VPRINTF_CHK:
10212 if (!validate_arg (arg0, INTEGER_TYPE)
10213 || TREE_SIDE_EFFECTS (arg0))
10216 return fold_builtin_printf (loc, fndecl,
10217 arg1, NULL_TREE, ignore, fcode);
10220 case BUILT_IN_FPRINTF:
10221 case BUILT_IN_FPRINTF_UNLOCKED:
10222 case BUILT_IN_VFPRINTF:
10223 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10232 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10233 and ARG2. IGNORE is true if the result of the function call is ignored.
10234 This function returns NULL_TREE if no simplification was possible. */
10237 fold_builtin_3 (location_t loc, tree fndecl,
10238 tree arg0, tree arg1, tree arg2, bool ignore)
10240 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10241 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10245 CASE_FLT_FN (BUILT_IN_SINCOS):
10246 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10248 CASE_FLT_FN (BUILT_IN_FMA):
10249 if (validate_arg (arg0, REAL_TYPE)
10250 && validate_arg(arg1, REAL_TYPE)
10251 && validate_arg(arg2, REAL_TYPE))
10252 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10255 CASE_FLT_FN (BUILT_IN_REMQUO):
10256 if (validate_arg (arg0, REAL_TYPE)
10257 && validate_arg(arg1, REAL_TYPE)
10258 && validate_arg(arg2, POINTER_TYPE))
10259 return do_mpfr_remquo (arg0, arg1, arg2);
10262 case BUILT_IN_MEMSET:
10263 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10265 case BUILT_IN_BCOPY:
10266 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10267 void_type_node, true, /*endp=*/3);
10269 case BUILT_IN_MEMCPY:
10270 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10271 type, ignore, /*endp=*/0);
10273 case BUILT_IN_MEMPCPY:
10274 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10275 type, ignore, /*endp=*/1);
10277 case BUILT_IN_MEMMOVE:
10278 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10279 type, ignore, /*endp=*/3);
10281 case BUILT_IN_STRNCAT:
10282 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10284 case BUILT_IN_STRNCPY:
10285 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10287 case BUILT_IN_STRNCMP:
10288 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10290 case BUILT_IN_MEMCHR:
10291 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10293 case BUILT_IN_BCMP:
10294 case BUILT_IN_MEMCMP:
10295 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10297 case BUILT_IN_SPRINTF:
10298 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10300 case BUILT_IN_STRCPY_CHK:
10301 case BUILT_IN_STPCPY_CHK:
10302 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10305 case BUILT_IN_STRCAT_CHK:
10306 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10308 case BUILT_IN_PRINTF_CHK:
10309 case BUILT_IN_VPRINTF_CHK:
10310 if (!validate_arg (arg0, INTEGER_TYPE)
10311 || TREE_SIDE_EFFECTS (arg0))
10314 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10317 case BUILT_IN_FPRINTF:
10318 case BUILT_IN_FPRINTF_UNLOCKED:
10319 case BUILT_IN_VFPRINTF:
10320 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10323 case BUILT_IN_FPRINTF_CHK:
10324 case BUILT_IN_VFPRINTF_CHK:
10325 if (!validate_arg (arg1, INTEGER_TYPE)
10326 || TREE_SIDE_EFFECTS (arg1))
10329 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10338 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10339 ARG2, and ARG3. IGNORE is true if the result of the function call is
10340 ignored. This function returns NULL_TREE if no simplification was
10344 fold_builtin_4 (location_t loc, tree fndecl,
10345 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10347 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10351 case BUILT_IN_MEMCPY_CHK:
10352 case BUILT_IN_MEMPCPY_CHK:
10353 case BUILT_IN_MEMMOVE_CHK:
10354 case BUILT_IN_MEMSET_CHK:
10355 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10357 DECL_FUNCTION_CODE (fndecl));
10359 case BUILT_IN_STRNCPY_CHK:
10360 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10362 case BUILT_IN_STRNCAT_CHK:
10363 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10365 case BUILT_IN_FPRINTF_CHK:
10366 case BUILT_IN_VFPRINTF_CHK:
10367 if (!validate_arg (arg1, INTEGER_TYPE)
10368 || TREE_SIDE_EFFECTS (arg1))
10371 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10381 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10382 arguments, where NARGS <= 4. IGNORE is true if the result of the
10383 function call is ignored. This function returns NULL_TREE if no
10384 simplification was possible. Note that this only folds builtins with
10385 fixed argument patterns. Foldings that do varargs-to-varargs
10386 transformations, or that match calls with more than 4 arguments,
10387 need to be handled with fold_builtin_varargs instead. */
10389 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10392 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10394 tree ret = NULL_TREE;
10399 ret = fold_builtin_0 (loc, fndecl, ignore);
10402 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10405 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10408 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10411 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10419 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10420 SET_EXPR_LOCATION (ret, loc);
10421 TREE_NO_WARNING (ret) = 1;
10427 /* Builtins with folding operations that operate on "..." arguments
10428 need special handling; we need to store the arguments in a convenient
10429 data structure before attempting any folding. Fortunately there are
10430 only a few builtins that fall into this category. FNDECL is the
10431 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10432 result of the function call is ignored. */
10435 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10436 bool ignore ATTRIBUTE_UNUSED)
10438 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10439 tree ret = NULL_TREE;
10443 case BUILT_IN_SPRINTF_CHK:
10444 case BUILT_IN_VSPRINTF_CHK:
10445 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10448 case BUILT_IN_SNPRINTF_CHK:
10449 case BUILT_IN_VSNPRINTF_CHK:
10450 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10453 case BUILT_IN_FPCLASSIFY:
10454 ret = fold_builtin_fpclassify (loc, exp);
10462 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10463 SET_EXPR_LOCATION (ret, loc);
10464 TREE_NO_WARNING (ret) = 1;
10470 /* Return true if FNDECL shouldn't be folded right now.
10471 If a built-in function has an inline attribute always_inline
10472 wrapper, defer folding it after always_inline functions have
10473 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10474 might not be performed. */
10477 avoid_folding_inline_builtin (tree fndecl)
10479 return (DECL_DECLARED_INLINE_P (fndecl)
10480 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10482 && !cfun->always_inline_functions_inlined
10483 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10486 /* A wrapper function for builtin folding that prevents warnings for
10487 "statement without effect" and the like, caused by removing the
10488 call node earlier than the warning is generated. */
10491 fold_call_expr (location_t loc, tree exp, bool ignore)
10493 tree ret = NULL_TREE;
10494 tree fndecl = get_callee_fndecl (exp);
10496 && TREE_CODE (fndecl) == FUNCTION_DECL
10497 && DECL_BUILT_IN (fndecl)
10498 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10499 yet. Defer folding until we see all the arguments
10500 (after inlining). */
10501 && !CALL_EXPR_VA_ARG_PACK (exp))
10503 int nargs = call_expr_nargs (exp);
10505 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10506 instead last argument is __builtin_va_arg_pack (). Defer folding
10507 even in that case, until arguments are finalized. */
10508 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10510 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10512 && TREE_CODE (fndecl2) == FUNCTION_DECL
10513 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10514 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10518 if (avoid_folding_inline_builtin (fndecl))
10521 /* FIXME: Don't use a list in this interface. */
10522 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10523 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10526 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10528 tree *args = CALL_EXPR_ARGP (exp);
10529 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10532 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10540 /* Conveniently construct a function call expression. FNDECL names the
10541 function to be called and ARGLIST is a TREE_LIST of arguments. */
10544 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10546 tree fntype = TREE_TYPE (fndecl);
10547 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10548 int n = list_length (arglist);
10549 tree *argarray = (tree *) alloca (n * sizeof (tree));
10552 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10553 argarray[i] = TREE_VALUE (arglist);
10554 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10557 /* Conveniently construct a function call expression. FNDECL names the
10558 function to be called, N is the number of arguments, and the "..."
10559 parameters are the argument expressions. */
10562 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10565 tree fntype = TREE_TYPE (fndecl);
10566 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10567 tree *argarray = (tree *) alloca (n * sizeof (tree));
10571 for (i = 0; i < n; i++)
10572 argarray[i] = va_arg (ap, tree);
10574 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10577 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10578 N arguments are passed in the array ARGARRAY. */
10581 fold_builtin_call_array (location_t loc, tree type,
10586 tree ret = NULL_TREE;
10590 if (TREE_CODE (fn) == ADDR_EXPR)
10592 tree fndecl = TREE_OPERAND (fn, 0);
10593 if (TREE_CODE (fndecl) == FUNCTION_DECL
10594 && DECL_BUILT_IN (fndecl))
10596 /* If last argument is __builtin_va_arg_pack (), arguments to this
10597 function are not finalized yet. Defer folding until they are. */
10598 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10600 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10602 && TREE_CODE (fndecl2) == FUNCTION_DECL
10603 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10604 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10605 return build_call_array_loc (loc, type, fn, n, argarray);
10607 if (avoid_folding_inline_builtin (fndecl))
10608 return build_call_array_loc (loc, type, fn, n, argarray);
10609 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10611 tree arglist = NULL_TREE;
10612 for (i = n - 1; i >= 0; i--)
10613 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10614 ret = targetm.fold_builtin (fndecl, arglist, false);
10617 return build_call_array_loc (loc, type, fn, n, argarray);
10619 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10621 /* First try the transformations that don't require consing up
10623 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10628 /* If we got this far, we need to build an exp. */
10629 exp = build_call_array_loc (loc, type, fn, n, argarray);
10630 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10631 return ret ? ret : exp;
10635 return build_call_array_loc (loc, type, fn, n, argarray);
10638 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10639 along with N new arguments specified as the "..." parameters. SKIP
10640 is the number of arguments in EXP to be omitted. This function is used
10641 to do varargs-to-varargs transformations. */
10644 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10646 int oldnargs = call_expr_nargs (exp);
10647 int nargs = oldnargs - skip + n;
10648 tree fntype = TREE_TYPE (fndecl);
10649 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10657 buffer = XALLOCAVEC (tree, nargs);
10659 for (i = 0; i < n; i++)
10660 buffer[i] = va_arg (ap, tree);
10662 for (j = skip; j < oldnargs; j++, i++)
10663 buffer[i] = CALL_EXPR_ARG (exp, j);
10666 buffer = CALL_EXPR_ARGP (exp) + skip;
10668 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
10671 /* Validate a single argument ARG against a tree code CODE representing
10675 validate_arg (const_tree arg, enum tree_code code)
10679 else if (code == POINTER_TYPE)
10680 return POINTER_TYPE_P (TREE_TYPE (arg));
10681 else if (code == INTEGER_TYPE)
10682 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10683 return code == TREE_CODE (TREE_TYPE (arg));
10686 /* This function validates the types of a function call argument list
10687 against a specified list of tree_codes. If the last specifier is a 0,
10688 that represents an ellipses, otherwise the last specifier must be a
10691 This is the GIMPLE version of validate_arglist. Eventually we want to
10692 completely convert builtins.c to work from GIMPLEs and the tree based
10693 validate_arglist will then be removed. */
10696 validate_gimple_arglist (const_gimple call, ...)
10698 enum tree_code code;
10704 va_start (ap, call);
10709 code = (enum tree_code) va_arg (ap, int);
10713 /* This signifies an ellipses, any further arguments are all ok. */
10717 /* This signifies an endlink, if no arguments remain, return
10718 true, otherwise return false. */
10719 res = (i == gimple_call_num_args (call));
10722 /* If no parameters remain or the parameter's code does not
10723 match the specified code, return false. Otherwise continue
10724 checking any remaining arguments. */
10725 arg = gimple_call_arg (call, i++);
10726 if (!validate_arg (arg, code))
10733 /* We need gotos here since we can only have one VA_CLOSE in a
10741 /* This function validates the types of a function call argument list
10742 against a specified list of tree_codes. If the last specifier is a 0,
10743 that represents an ellipses, otherwise the last specifier must be a
10747 validate_arglist (const_tree callexpr, ...)
10749 enum tree_code code;
10752 const_call_expr_arg_iterator iter;
10755 va_start (ap, callexpr);
10756 init_const_call_expr_arg_iterator (callexpr, &iter);
10760 code = (enum tree_code) va_arg (ap, int);
10764 /* This signifies an ellipses, any further arguments are all ok. */
10768 /* This signifies an endlink, if no arguments remain, return
10769 true, otherwise return false. */
10770 res = !more_const_call_expr_args_p (&iter);
10773 /* If no parameters remain or the parameter's code does not
10774 match the specified code, return false. Otherwise continue
10775 checking any remaining arguments. */
10776 arg = next_const_call_expr_arg (&iter);
10777 if (!validate_arg (arg, code))
10784 /* We need gotos here since we can only have one VA_CLOSE in a
10792 /* Default target-specific builtin expander that does nothing. */
10795 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10796 rtx target ATTRIBUTE_UNUSED,
10797 rtx subtarget ATTRIBUTE_UNUSED,
10798 enum machine_mode mode ATTRIBUTE_UNUSED,
10799 int ignore ATTRIBUTE_UNUSED)
10804 /* Returns true is EXP represents data that would potentially reside
10805 in a readonly section. */
10808 readonly_data_expr (tree exp)
10812 if (TREE_CODE (exp) != ADDR_EXPR)
10815 exp = get_base_address (TREE_OPERAND (exp, 0));
10819 /* Make sure we call decl_readonly_section only for trees it
10820 can handle (since it returns true for everything it doesn't
10822 if (TREE_CODE (exp) == STRING_CST
10823 || TREE_CODE (exp) == CONSTRUCTOR
10824 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10825 return decl_readonly_section (exp, 0);
10830 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10831 to the call, and TYPE is its return type.
10833 Return NULL_TREE if no simplification was possible, otherwise return the
10834 simplified form of the call as a tree.
10836 The simplified form may be a constant or other expression which
10837 computes the same value, but in a more efficient manner (including
10838 calls to other builtin functions).
10840 The call may contain arguments which need to be evaluated, but
10841 which are not useful to determine the result of the call. In
10842 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10843 COMPOUND_EXPR will be an argument which must be evaluated.
10844 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10845 COMPOUND_EXPR in the chain will contain the tree for the simplified
10846 form of the builtin function call. */
10849 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10851 if (!validate_arg (s1, POINTER_TYPE)
10852 || !validate_arg (s2, POINTER_TYPE))
10857 const char *p1, *p2;
10859 p2 = c_getstr (s2);
10863 p1 = c_getstr (s1);
10866 const char *r = strstr (p1, p2);
10870 return build_int_cst (TREE_TYPE (s1), 0);
10872 /* Return an offset into the constant string argument. */
10873 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10874 s1, size_int (r - p1));
10875 return fold_convert_loc (loc, type, tem);
10878 /* The argument is const char *, and the result is char *, so we need
10879 a type conversion here to avoid a warning. */
10881 return fold_convert_loc (loc, type, s1);
10886 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10890 /* New argument list transforming strstr(s1, s2) to
10891 strchr(s1, s2[0]). */
10892 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10896 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10897 the call, and TYPE is its return type.
10899 Return NULL_TREE if no simplification was possible, otherwise return the
10900 simplified form of the call as a tree.
10902 The simplified form may be a constant or other expression which
10903 computes the same value, but in a more efficient manner (including
10904 calls to other builtin functions).
10906 The call may contain arguments which need to be evaluated, but
10907 which are not useful to determine the result of the call. In
10908 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10909 COMPOUND_EXPR will be an argument which must be evaluated.
10910 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10911 COMPOUND_EXPR in the chain will contain the tree for the simplified
10912 form of the builtin function call. */
10915 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10917 if (!validate_arg (s1, POINTER_TYPE)
10918 || !validate_arg (s2, INTEGER_TYPE))
10924 if (TREE_CODE (s2) != INTEGER_CST)
10927 p1 = c_getstr (s1);
10934 if (target_char_cast (s2, &c))
10937 r = strchr (p1, c);
10940 return build_int_cst (TREE_TYPE (s1), 0);
10942 /* Return an offset into the constant string argument. */
10943 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10944 s1, size_int (r - p1));
10945 return fold_convert_loc (loc, type, tem);
10951 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10952 the call, and TYPE is its return type.
10954 Return NULL_TREE if no simplification was possible, otherwise return the
10955 simplified form of the call as a tree.
10957 The simplified form may be a constant or other expression which
10958 computes the same value, but in a more efficient manner (including
10959 calls to other builtin functions).
10961 The call may contain arguments which need to be evaluated, but
10962 which are not useful to determine the result of the call. In
10963 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10964 COMPOUND_EXPR will be an argument which must be evaluated.
10965 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10966 COMPOUND_EXPR in the chain will contain the tree for the simplified
10967 form of the builtin function call. */
10970 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10972 if (!validate_arg (s1, POINTER_TYPE)
10973 || !validate_arg (s2, INTEGER_TYPE))
10980 if (TREE_CODE (s2) != INTEGER_CST)
10983 p1 = c_getstr (s1);
10990 if (target_char_cast (s2, &c))
10993 r = strrchr (p1, c);
10996 return build_int_cst (TREE_TYPE (s1), 0);
10998 /* Return an offset into the constant string argument. */
10999 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11000 s1, size_int (r - p1));
11001 return fold_convert_loc (loc, type, tem);
11004 if (! integer_zerop (s2))
11007 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11011 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11012 return build_call_expr_loc (loc, fn, 2, s1, s2);
11016 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11017 to the call, and TYPE is its return type.
11019 Return NULL_TREE if no simplification was possible, otherwise return the
11020 simplified form of the call as a tree.
11022 The simplified form may be a constant or other expression which
11023 computes the same value, but in a more efficient manner (including
11024 calls to other builtin functions).
11026 The call may contain arguments which need to be evaluated, but
11027 which are not useful to determine the result of the call. In
11028 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11029 COMPOUND_EXPR will be an argument which must be evaluated.
11030 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11031 COMPOUND_EXPR in the chain will contain the tree for the simplified
11032 form of the builtin function call. */
11035 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11037 if (!validate_arg (s1, POINTER_TYPE)
11038 || !validate_arg (s2, POINTER_TYPE))
11043 const char *p1, *p2;
11045 p2 = c_getstr (s2);
11049 p1 = c_getstr (s1);
11052 const char *r = strpbrk (p1, p2);
11056 return build_int_cst (TREE_TYPE (s1), 0);
11058 /* Return an offset into the constant string argument. */
11059 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11060 s1, size_int (r - p1));
11061 return fold_convert_loc (loc, type, tem);
11065 /* strpbrk(x, "") == NULL.
11066 Evaluate and ignore s1 in case it had side-effects. */
11067 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11070 return NULL_TREE; /* Really call strpbrk. */
11072 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11076 /* New argument list transforming strpbrk(s1, s2) to
11077 strchr(s1, s2[0]). */
11078 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11082 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11085 Return NULL_TREE if no simplification was possible, otherwise return the
11086 simplified form of the call as a tree.
11088 The simplified form may be a constant or other expression which
11089 computes the same value, but in a more efficient manner (including
11090 calls to other builtin functions).
11092 The call may contain arguments which need to be evaluated, but
11093 which are not useful to determine the result of the call. In
11094 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11095 COMPOUND_EXPR will be an argument which must be evaluated.
11096 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11097 COMPOUND_EXPR in the chain will contain the tree for the simplified
11098 form of the builtin function call. */
11101 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11103 if (!validate_arg (dst, POINTER_TYPE)
11104 || !validate_arg (src, POINTER_TYPE))
11108 const char *p = c_getstr (src);
11110 /* If the string length is zero, return the dst parameter. */
11111 if (p && *p == '\0')
11114 if (optimize_insn_for_speed_p ())
11116 /* See if we can store by pieces into (dst + strlen(dst)). */
11118 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11119 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11121 if (!strlen_fn || !strcpy_fn)
11124 /* If we don't have a movstr we don't want to emit an strcpy
11125 call. We have to do that if the length of the source string
11126 isn't computable (in that case we can use memcpy probably
11127 later expanding to a sequence of mov instructions). If we
11128 have movstr instructions we can emit strcpy calls. */
11131 tree len = c_strlen (src, 1);
11132 if (! len || TREE_SIDE_EFFECTS (len))
11136 /* Stabilize the argument list. */
11137 dst = builtin_save_expr (dst);
11139 /* Create strlen (dst). */
11140 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11141 /* Create (dst p+ strlen (dst)). */
11143 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11144 TREE_TYPE (dst), dst, newdst);
11145 newdst = builtin_save_expr (newdst);
11147 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11148 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11154 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11155 arguments to the call.
11157 Return NULL_TREE if no simplification was possible, otherwise return the
11158 simplified form of the call as a tree.
11160 The simplified form may be a constant or other expression which
11161 computes the same value, but in a more efficient manner (including
11162 calls to other builtin functions).
11164 The call may contain arguments which need to be evaluated, but
11165 which are not useful to determine the result of the call. In
11166 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11167 COMPOUND_EXPR will be an argument which must be evaluated.
11168 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11169 COMPOUND_EXPR in the chain will contain the tree for the simplified
11170 form of the builtin function call. */
11173 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11175 if (!validate_arg (dst, POINTER_TYPE)
11176 || !validate_arg (src, POINTER_TYPE)
11177 || !validate_arg (len, INTEGER_TYPE))
11181 const char *p = c_getstr (src);
11183 /* If the requested length is zero, or the src parameter string
11184 length is zero, return the dst parameter. */
11185 if (integer_zerop (len) || (p && *p == '\0'))
11186 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11188 /* If the requested len is greater than or equal to the string
11189 length, call strcat. */
11190 if (TREE_CODE (len) == INTEGER_CST && p
11191 && compare_tree_int (len, strlen (p)) >= 0)
11193 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11195 /* If the replacement _DECL isn't initialized, don't do the
11200 return build_call_expr_loc (loc, fn, 2, dst, src);
11206 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11209 Return NULL_TREE if no simplification was possible, otherwise return the
11210 simplified form of the call as a tree.
11212 The simplified form may be a constant or other expression which
11213 computes the same value, but in a more efficient manner (including
11214 calls to other builtin functions).
11216 The call may contain arguments which need to be evaluated, but
11217 which are not useful to determine the result of the call. In
11218 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11219 COMPOUND_EXPR will be an argument which must be evaluated.
11220 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11221 COMPOUND_EXPR in the chain will contain the tree for the simplified
11222 form of the builtin function call. */
11225 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11227 if (!validate_arg (s1, POINTER_TYPE)
11228 || !validate_arg (s2, POINTER_TYPE))
11232 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11234 /* If both arguments are constants, evaluate at compile-time. */
11237 const size_t r = strspn (p1, p2);
11238 return size_int (r);
11241 /* If either argument is "", return NULL_TREE. */
11242 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11243 /* Evaluate and ignore both arguments in case either one has
11245 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11251 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11254 Return NULL_TREE if no simplification was possible, otherwise return the
11255 simplified form of the call as a tree.
11257 The simplified form may be a constant or other expression which
11258 computes the same value, but in a more efficient manner (including
11259 calls to other builtin functions).
11261 The call may contain arguments which need to be evaluated, but
11262 which are not useful to determine the result of the call. In
11263 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11264 COMPOUND_EXPR will be an argument which must be evaluated.
11265 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11266 COMPOUND_EXPR in the chain will contain the tree for the simplified
11267 form of the builtin function call. */
11270 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11272 if (!validate_arg (s1, POINTER_TYPE)
11273 || !validate_arg (s2, POINTER_TYPE))
11277 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11279 /* If both arguments are constants, evaluate at compile-time. */
11282 const size_t r = strcspn (p1, p2);
11283 return size_int (r);
11286 /* If the first argument is "", return NULL_TREE. */
11287 if (p1 && *p1 == '\0')
11289 /* Evaluate and ignore argument s2 in case it has
11291 return omit_one_operand_loc (loc, size_type_node,
11292 size_zero_node, s2);
11295 /* If the second argument is "", return __builtin_strlen(s1). */
11296 if (p2 && *p2 == '\0')
11298 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11300 /* If the replacement _DECL isn't initialized, don't do the
11305 return build_call_expr_loc (loc, fn, 1, s1);
11311 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11312 to the call. IGNORE is true if the value returned
11313 by the builtin will be ignored. UNLOCKED is true is true if this
11314 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11315 the known length of the string. Return NULL_TREE if no simplification
11319 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11320 bool ignore, bool unlocked, tree len)
11322 /* If we're using an unlocked function, assume the other unlocked
11323 functions exist explicitly. */
11324 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11325 : implicit_built_in_decls[BUILT_IN_FPUTC];
11326 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11327 : implicit_built_in_decls[BUILT_IN_FWRITE];
11329 /* If the return value is used, don't do the transformation. */
11333 /* Verify the arguments in the original call. */
11334 if (!validate_arg (arg0, POINTER_TYPE)
11335 || !validate_arg (arg1, POINTER_TYPE))
11339 len = c_strlen (arg0, 0);
11341 /* Get the length of the string passed to fputs. If the length
11342 can't be determined, punt. */
11344 || TREE_CODE (len) != INTEGER_CST)
11347 switch (compare_tree_int (len, 1))
11349 case -1: /* length is 0, delete the call entirely . */
11350 return omit_one_operand_loc (loc, integer_type_node,
11351 integer_zero_node, arg1);;
11353 case 0: /* length is 1, call fputc. */
11355 const char *p = c_getstr (arg0);
11360 return build_call_expr_loc (loc, fn_fputc, 2,
11361 build_int_cst (NULL_TREE, p[0]), arg1);
11367 case 1: /* length is greater than 1, call fwrite. */
11369 /* If optimizing for size keep fputs. */
11370 if (optimize_function_for_size_p (cfun))
11372 /* New argument list transforming fputs(string, stream) to
11373 fwrite(string, 1, len, stream). */
11375 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11376 size_one_node, len, arg1);
11381 gcc_unreachable ();
11386 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11387 produced. False otherwise. This is done so that we don't output the error
11388 or warning twice or three times. */
11391 fold_builtin_next_arg (tree exp, bool va_start_p)
11393 tree fntype = TREE_TYPE (current_function_decl);
11394 int nargs = call_expr_nargs (exp);
11397 if (TYPE_ARG_TYPES (fntype) == 0
11398 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11399 == void_type_node))
11401 error ("%<va_start%> used in function with fixed args");
11407 if (va_start_p && (nargs != 2))
11409 error ("wrong number of arguments to function %<va_start%>");
11412 arg = CALL_EXPR_ARG (exp, 1);
11414 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11415 when we checked the arguments and if needed issued a warning. */
11420 /* Evidently an out of date version of <stdarg.h>; can't validate
11421 va_start's second argument, but can still work as intended. */
11422 warning (0, "%<__builtin_next_arg%> called without an argument");
11425 else if (nargs > 1)
11427 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11430 arg = CALL_EXPR_ARG (exp, 0);
11433 if (TREE_CODE (arg) == SSA_NAME)
11434 arg = SSA_NAME_VAR (arg);
11436 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11437 or __builtin_next_arg (0) the first time we see it, after checking
11438 the arguments and if needed issuing a warning. */
11439 if (!integer_zerop (arg))
11441 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11443 /* Strip off all nops for the sake of the comparison. This
11444 is not quite the same as STRIP_NOPS. It does more.
11445 We must also strip off INDIRECT_EXPR for C++ reference
11447 while (CONVERT_EXPR_P (arg)
11448 || TREE_CODE (arg) == INDIRECT_REF)
11449 arg = TREE_OPERAND (arg, 0);
11450 if (arg != last_parm)
11452 /* FIXME: Sometimes with the tree optimizers we can get the
11453 not the last argument even though the user used the last
11454 argument. We just warn and set the arg to be the last
11455 argument so that we will get wrong-code because of
11457 warning (0, "second parameter of %<va_start%> not last named argument");
11460 /* Undefined by C99 7.15.1.4p4 (va_start):
11461 "If the parameter parmN is declared with the register storage
11462 class, with a function or array type, or with a type that is
11463 not compatible with the type that results after application of
11464 the default argument promotions, the behavior is undefined."
11466 else if (DECL_REGISTER (arg))
11467 warning (0, "undefined behaviour when second parameter of "
11468 "%<va_start%> is declared with %<register%> storage");
11470 /* We want to verify the second parameter just once before the tree
11471 optimizers are run and then avoid keeping it in the tree,
11472 as otherwise we could warn even for correct code like:
11473 void foo (int i, ...)
11474 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11476 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11478 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11484 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11485 ORIG may be null if this is a 2-argument call. We don't attempt to
11486 simplify calls with more than 3 arguments.
11488 Return NULL_TREE if no simplification was possible, otherwise return the
11489 simplified form of the call as a tree. If IGNORED is true, it means that
11490 the caller does not use the returned value of the function. */
11493 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11494 tree orig, int ignored)
11497 const char *fmt_str = NULL;
11499 /* Verify the required arguments in the original call. We deal with two
11500 types of sprintf() calls: 'sprintf (str, fmt)' and
11501 'sprintf (dest, "%s", orig)'. */
11502 if (!validate_arg (dest, POINTER_TYPE)
11503 || !validate_arg (fmt, POINTER_TYPE))
11505 if (orig && !validate_arg (orig, POINTER_TYPE))
11508 /* Check whether the format is a literal string constant. */
11509 fmt_str = c_getstr (fmt);
11510 if (fmt_str == NULL)
11514 retval = NULL_TREE;
11516 if (!init_target_chars ())
11519 /* If the format doesn't contain % args or %%, use strcpy. */
11520 if (strchr (fmt_str, target_percent) == NULL)
11522 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11527 /* Don't optimize sprintf (buf, "abc", ptr++). */
11531 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11532 'format' is known to contain no % formats. */
11533 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11535 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11538 /* If the format is "%s", use strcpy if the result isn't used. */
11539 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11542 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11547 /* Don't crash on sprintf (str1, "%s"). */
11551 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11554 retval = c_strlen (orig, 1);
11555 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11558 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11561 if (call && retval)
11563 retval = fold_convert_loc
11564 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11566 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11572 /* Expand a call EXP to __builtin_object_size. */
11575 expand_builtin_object_size (tree exp)
11578 int object_size_type;
11579 tree fndecl = get_callee_fndecl (exp);
11581 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11583 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11585 expand_builtin_trap ();
11589 ost = CALL_EXPR_ARG (exp, 1);
11592 if (TREE_CODE (ost) != INTEGER_CST
11593 || tree_int_cst_sgn (ost) < 0
11594 || compare_tree_int (ost, 3) > 0)
11596 error ("%Klast argument of %D is not integer constant between 0 and 3",
11598 expand_builtin_trap ();
11602 object_size_type = tree_low_cst (ost, 0);
11604 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11607 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11608 FCODE is the BUILT_IN_* to use.
11609 Return NULL_RTX if we failed; the caller should emit a normal call,
11610 otherwise try to get the result in TARGET, if convenient (and in
11611 mode MODE if that's convenient). */
11614 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11615 enum built_in_function fcode)
11617 tree dest, src, len, size;
11619 if (!validate_arglist (exp,
11621 fcode == BUILT_IN_MEMSET_CHK
11622 ? INTEGER_TYPE : POINTER_TYPE,
11623 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11626 dest = CALL_EXPR_ARG (exp, 0);
11627 src = CALL_EXPR_ARG (exp, 1);
11628 len = CALL_EXPR_ARG (exp, 2);
11629 size = CALL_EXPR_ARG (exp, 3);
11631 if (! host_integerp (size, 1))
11634 if (host_integerp (len, 1) || integer_all_onesp (size))
11638 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11640 warning_at (tree_nonartificial_location (exp),
11641 0, "%Kcall to %D will always overflow destination buffer",
11642 exp, get_callee_fndecl (exp));
11647 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11648 mem{cpy,pcpy,move,set} is available. */
11651 case BUILT_IN_MEMCPY_CHK:
11652 fn = built_in_decls[BUILT_IN_MEMCPY];
11654 case BUILT_IN_MEMPCPY_CHK:
11655 fn = built_in_decls[BUILT_IN_MEMPCPY];
11657 case BUILT_IN_MEMMOVE_CHK:
11658 fn = built_in_decls[BUILT_IN_MEMMOVE];
11660 case BUILT_IN_MEMSET_CHK:
11661 fn = built_in_decls[BUILT_IN_MEMSET];
11670 fn = build_call_nofold (fn, 3, dest, src, len);
11671 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11672 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11673 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11675 else if (fcode == BUILT_IN_MEMSET_CHK)
11679 unsigned int dest_align
11680 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11682 /* If DEST is not a pointer type, call the normal function. */
11683 if (dest_align == 0)
11686 /* If SRC and DEST are the same (and not volatile), do nothing. */
11687 if (operand_equal_p (src, dest, 0))
11691 if (fcode != BUILT_IN_MEMPCPY_CHK)
11693 /* Evaluate and ignore LEN in case it has side-effects. */
11694 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11695 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11698 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11699 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11702 /* __memmove_chk special case. */
11703 if (fcode == BUILT_IN_MEMMOVE_CHK)
11705 unsigned int src_align
11706 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11708 if (src_align == 0)
11711 /* If src is categorized for a readonly section we can use
11712 normal __memcpy_chk. */
11713 if (readonly_data_expr (src))
11715 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11718 fn = build_call_nofold (fn, 4, dest, src, len, size);
11719 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11720 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11721 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11728 /* Emit warning if a buffer overflow is detected at compile time. */
11731 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11735 location_t loc = tree_nonartificial_location (exp);
11739 case BUILT_IN_STRCPY_CHK:
11740 case BUILT_IN_STPCPY_CHK:
11741 /* For __strcat_chk the warning will be emitted only if overflowing
11742 by at least strlen (dest) + 1 bytes. */
11743 case BUILT_IN_STRCAT_CHK:
11744 len = CALL_EXPR_ARG (exp, 1);
11745 size = CALL_EXPR_ARG (exp, 2);
11748 case BUILT_IN_STRNCAT_CHK:
11749 case BUILT_IN_STRNCPY_CHK:
11750 len = CALL_EXPR_ARG (exp, 2);
11751 size = CALL_EXPR_ARG (exp, 3);
11753 case BUILT_IN_SNPRINTF_CHK:
11754 case BUILT_IN_VSNPRINTF_CHK:
11755 len = CALL_EXPR_ARG (exp, 1);
11756 size = CALL_EXPR_ARG (exp, 3);
11759 gcc_unreachable ();
11765 if (! host_integerp (size, 1) || integer_all_onesp (size))
11770 len = c_strlen (len, 1);
11771 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11774 else if (fcode == BUILT_IN_STRNCAT_CHK)
11776 tree src = CALL_EXPR_ARG (exp, 1);
11777 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11779 src = c_strlen (src, 1);
11780 if (! src || ! host_integerp (src, 1))
11782 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11783 exp, get_callee_fndecl (exp));
11786 else if (tree_int_cst_lt (src, size))
11789 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11792 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11793 exp, get_callee_fndecl (exp));
11796 /* Emit warning if a buffer overflow is detected at compile time
11797 in __sprintf_chk/__vsprintf_chk calls. */
11800 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11802 tree size, len, fmt;
11803 const char *fmt_str;
11804 int nargs = call_expr_nargs (exp);
11806 /* Verify the required arguments in the original call. */
11810 size = CALL_EXPR_ARG (exp, 2);
11811 fmt = CALL_EXPR_ARG (exp, 3);
11813 if (! host_integerp (size, 1) || integer_all_onesp (size))
11816 /* Check whether the format is a literal string constant. */
11817 fmt_str = c_getstr (fmt);
11818 if (fmt_str == NULL)
11821 if (!init_target_chars ())
11824 /* If the format doesn't contain % args or %%, we know its size. */
11825 if (strchr (fmt_str, target_percent) == 0)
11826 len = build_int_cstu (size_type_node, strlen (fmt_str));
11827 /* If the format is "%s" and first ... argument is a string literal,
11829 else if (fcode == BUILT_IN_SPRINTF_CHK
11830 && strcmp (fmt_str, target_percent_s) == 0)
11836 arg = CALL_EXPR_ARG (exp, 4);
11837 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11840 len = c_strlen (arg, 1);
11841 if (!len || ! host_integerp (len, 1))
11847 if (! tree_int_cst_lt (len, size))
11848 warning_at (tree_nonartificial_location (exp),
11849 0, "%Kcall to %D will always overflow destination buffer",
11850 exp, get_callee_fndecl (exp));
11853 /* Emit warning if a free is called with address of a variable. */
11856 maybe_emit_free_warning (tree exp)
11858 tree arg = CALL_EXPR_ARG (exp, 0);
11861 if (TREE_CODE (arg) != ADDR_EXPR)
11864 arg = get_base_address (TREE_OPERAND (arg, 0));
11865 if (arg == NULL || INDIRECT_REF_P (arg))
11868 if (SSA_VAR_P (arg))
11869 warning_at (tree_nonartificial_location (exp),
11870 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11872 warning_at (tree_nonartificial_location (exp),
11873 0, "%Kattempt to free a non-heap object", exp);
11876 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11880 fold_builtin_object_size (tree ptr, tree ost)
11882 tree ret = NULL_TREE;
11883 int object_size_type;
11885 if (!validate_arg (ptr, POINTER_TYPE)
11886 || !validate_arg (ost, INTEGER_TYPE))
11891 if (TREE_CODE (ost) != INTEGER_CST
11892 || tree_int_cst_sgn (ost) < 0
11893 || compare_tree_int (ost, 3) > 0)
11896 object_size_type = tree_low_cst (ost, 0);
11898 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11899 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11900 and (size_t) 0 for types 2 and 3. */
11901 if (TREE_SIDE_EFFECTS (ptr))
11902 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11904 if (TREE_CODE (ptr) == ADDR_EXPR)
11905 ret = build_int_cstu (size_type_node,
11906 compute_builtin_object_size (ptr, object_size_type));
11908 else if (TREE_CODE (ptr) == SSA_NAME)
11910 unsigned HOST_WIDE_INT bytes;
11912 /* If object size is not known yet, delay folding until
11913 later. Maybe subsequent passes will help determining
11915 bytes = compute_builtin_object_size (ptr, object_size_type);
11916 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11918 ret = build_int_cstu (size_type_node, bytes);
11923 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11924 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11925 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11932 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11933 DEST, SRC, LEN, and SIZE are the arguments to the call.
11934 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11935 code of the builtin. If MAXLEN is not NULL, it is maximum length
11936 passed as third argument. */
11939 fold_builtin_memory_chk (location_t loc, tree fndecl,
11940 tree dest, tree src, tree len, tree size,
11941 tree maxlen, bool ignore,
11942 enum built_in_function fcode)
11946 if (!validate_arg (dest, POINTER_TYPE)
11947 || !validate_arg (src,
11948 (fcode == BUILT_IN_MEMSET_CHK
11949 ? INTEGER_TYPE : POINTER_TYPE))
11950 || !validate_arg (len, INTEGER_TYPE)
11951 || !validate_arg (size, INTEGER_TYPE))
11954 /* If SRC and DEST are the same (and not volatile), return DEST
11955 (resp. DEST+LEN for __mempcpy_chk). */
11956 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11958 if (fcode != BUILT_IN_MEMPCPY_CHK)
11959 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
11963 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
11965 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
11969 if (! host_integerp (size, 1))
11972 if (! integer_all_onesp (size))
11974 if (! host_integerp (len, 1))
11976 /* If LEN is not constant, try MAXLEN too.
11977 For MAXLEN only allow optimizing into non-_ocs function
11978 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11979 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11981 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11983 /* (void) __mempcpy_chk () can be optimized into
11984 (void) __memcpy_chk (). */
11985 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11989 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
11997 if (tree_int_cst_lt (size, maxlen))
12002 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12003 mem{cpy,pcpy,move,set} is available. */
12006 case BUILT_IN_MEMCPY_CHK:
12007 fn = built_in_decls[BUILT_IN_MEMCPY];
12009 case BUILT_IN_MEMPCPY_CHK:
12010 fn = built_in_decls[BUILT_IN_MEMPCPY];
12012 case BUILT_IN_MEMMOVE_CHK:
12013 fn = built_in_decls[BUILT_IN_MEMMOVE];
12015 case BUILT_IN_MEMSET_CHK:
12016 fn = built_in_decls[BUILT_IN_MEMSET];
12025 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12028 /* Fold a call to the __st[rp]cpy_chk builtin.
12029 DEST, SRC, and SIZE are the arguments to the call.
12030 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12031 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12032 strings passed as second argument. */
12035 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12036 tree src, tree size,
12037 tree maxlen, bool ignore,
12038 enum built_in_function fcode)
12042 if (!validate_arg (dest, POINTER_TYPE)
12043 || !validate_arg (src, POINTER_TYPE)
12044 || !validate_arg (size, INTEGER_TYPE))
12047 /* If SRC and DEST are the same (and not volatile), return DEST. */
12048 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12049 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12051 if (! host_integerp (size, 1))
12054 if (! integer_all_onesp (size))
12056 len = c_strlen (src, 1);
12057 if (! len || ! host_integerp (len, 1))
12059 /* If LEN is not constant, try MAXLEN too.
12060 For MAXLEN only allow optimizing into non-_ocs function
12061 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12062 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12064 if (fcode == BUILT_IN_STPCPY_CHK)
12069 /* If return value of __stpcpy_chk is ignored,
12070 optimize into __strcpy_chk. */
12071 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12075 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12078 if (! len || TREE_SIDE_EFFECTS (len))
12081 /* If c_strlen returned something, but not a constant,
12082 transform __strcpy_chk into __memcpy_chk. */
12083 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12087 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12088 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12089 build_call_expr_loc (loc, fn, 4,
12090 dest, src, len, size));
12096 if (! tree_int_cst_lt (maxlen, size))
12100 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12101 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12102 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12106 return build_call_expr_loc (loc, fn, 2, dest, src);
12109 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12110 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12111 length passed as third argument. */
12114 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12115 tree len, tree size, tree maxlen)
12119 if (!validate_arg (dest, POINTER_TYPE)
12120 || !validate_arg (src, POINTER_TYPE)
12121 || !validate_arg (len, INTEGER_TYPE)
12122 || !validate_arg (size, INTEGER_TYPE))
12125 if (! host_integerp (size, 1))
12128 if (! integer_all_onesp (size))
12130 if (! host_integerp (len, 1))
12132 /* If LEN is not constant, try MAXLEN too.
12133 For MAXLEN only allow optimizing into non-_ocs function
12134 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12135 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12141 if (tree_int_cst_lt (size, maxlen))
12145 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12146 fn = built_in_decls[BUILT_IN_STRNCPY];
12150 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12153 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12154 are the arguments to the call. */
12157 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12158 tree src, tree size)
12163 if (!validate_arg (dest, POINTER_TYPE)
12164 || !validate_arg (src, POINTER_TYPE)
12165 || !validate_arg (size, INTEGER_TYPE))
12168 p = c_getstr (src);
12169 /* If the SRC parameter is "", return DEST. */
12170 if (p && *p == '\0')
12171 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12173 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12176 /* If __builtin_strcat_chk is used, assume strcat is available. */
12177 fn = built_in_decls[BUILT_IN_STRCAT];
12181 return build_call_expr_loc (loc, fn, 2, dest, src);
12184 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12188 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12189 tree dest, tree src, tree len, tree size)
12194 if (!validate_arg (dest, POINTER_TYPE)
12195 || !validate_arg (src, POINTER_TYPE)
12196 || !validate_arg (size, INTEGER_TYPE)
12197 || !validate_arg (size, INTEGER_TYPE))
12200 p = c_getstr (src);
12201 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12202 if (p && *p == '\0')
12203 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12204 else if (integer_zerop (len))
12205 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12207 if (! host_integerp (size, 1))
12210 if (! integer_all_onesp (size))
12212 tree src_len = c_strlen (src, 1);
12214 && host_integerp (src_len, 1)
12215 && host_integerp (len, 1)
12216 && ! tree_int_cst_lt (len, src_len))
12218 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12219 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12223 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12228 /* If __builtin_strncat_chk is used, assume strncat is available. */
12229 fn = built_in_decls[BUILT_IN_STRNCAT];
12233 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12236 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12237 a normal call should be emitted rather than expanding the function
12238 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12241 fold_builtin_sprintf_chk (location_t loc, tree exp,
12242 enum built_in_function fcode)
12244 tree dest, size, len, fn, fmt, flag;
12245 const char *fmt_str;
12246 int nargs = call_expr_nargs (exp);
12248 /* Verify the required arguments in the original call. */
12251 dest = CALL_EXPR_ARG (exp, 0);
12252 if (!validate_arg (dest, POINTER_TYPE))
12254 flag = CALL_EXPR_ARG (exp, 1);
12255 if (!validate_arg (flag, INTEGER_TYPE))
12257 size = CALL_EXPR_ARG (exp, 2);
12258 if (!validate_arg (size, INTEGER_TYPE))
12260 fmt = CALL_EXPR_ARG (exp, 3);
12261 if (!validate_arg (fmt, POINTER_TYPE))
12264 if (! host_integerp (size, 1))
12269 if (!init_target_chars ())
12272 /* Check whether the format is a literal string constant. */
12273 fmt_str = c_getstr (fmt);
12274 if (fmt_str != NULL)
12276 /* If the format doesn't contain % args or %%, we know the size. */
12277 if (strchr (fmt_str, target_percent) == 0)
12279 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12280 len = build_int_cstu (size_type_node, strlen (fmt_str));
12282 /* If the format is "%s" and first ... argument is a string literal,
12283 we know the size too. */
12284 else if (fcode == BUILT_IN_SPRINTF_CHK
12285 && strcmp (fmt_str, target_percent_s) == 0)
12291 arg = CALL_EXPR_ARG (exp, 4);
12292 if (validate_arg (arg, POINTER_TYPE))
12294 len = c_strlen (arg, 1);
12295 if (! len || ! host_integerp (len, 1))
12302 if (! integer_all_onesp (size))
12304 if (! len || ! tree_int_cst_lt (len, size))
12308 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12309 or if format doesn't contain % chars or is "%s". */
12310 if (! integer_zerop (flag))
12312 if (fmt_str == NULL)
12314 if (strchr (fmt_str, target_percent) != NULL
12315 && strcmp (fmt_str, target_percent_s))
12319 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12320 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12321 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12325 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12328 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12329 a normal call should be emitted rather than expanding the function
12330 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12331 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12332 passed as second argument. */
12335 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12336 enum built_in_function fcode)
12338 tree dest, size, len, fn, fmt, flag;
12339 const char *fmt_str;
12341 /* Verify the required arguments in the original call. */
12342 if (call_expr_nargs (exp) < 5)
12344 dest = CALL_EXPR_ARG (exp, 0);
12345 if (!validate_arg (dest, POINTER_TYPE))
12347 len = CALL_EXPR_ARG (exp, 1);
12348 if (!validate_arg (len, INTEGER_TYPE))
12350 flag = CALL_EXPR_ARG (exp, 2);
12351 if (!validate_arg (flag, INTEGER_TYPE))
12353 size = CALL_EXPR_ARG (exp, 3);
12354 if (!validate_arg (size, INTEGER_TYPE))
12356 fmt = CALL_EXPR_ARG (exp, 4);
12357 if (!validate_arg (fmt, POINTER_TYPE))
12360 if (! host_integerp (size, 1))
12363 if (! integer_all_onesp (size))
12365 if (! host_integerp (len, 1))
12367 /* If LEN is not constant, try MAXLEN too.
12368 For MAXLEN only allow optimizing into non-_ocs function
12369 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12370 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12376 if (tree_int_cst_lt (size, maxlen))
12380 if (!init_target_chars ())
12383 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12384 or if format doesn't contain % chars or is "%s". */
12385 if (! integer_zerop (flag))
12387 fmt_str = c_getstr (fmt);
12388 if (fmt_str == NULL)
12390 if (strchr (fmt_str, target_percent) != NULL
12391 && strcmp (fmt_str, target_percent_s))
12395 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12397 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12398 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12402 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12405 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12406 FMT and ARG are the arguments to the call; we don't fold cases with
12407 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12409 Return NULL_TREE if no simplification was possible, otherwise return the
12410 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12411 code of the function to be simplified. */
12414 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12415 tree arg, bool ignore,
12416 enum built_in_function fcode)
12418 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12419 const char *fmt_str = NULL;
12421 /* If the return value is used, don't do the transformation. */
12425 /* Verify the required arguments in the original call. */
12426 if (!validate_arg (fmt, POINTER_TYPE))
12429 /* Check whether the format is a literal string constant. */
12430 fmt_str = c_getstr (fmt);
12431 if (fmt_str == NULL)
12434 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12436 /* If we're using an unlocked function, assume the other
12437 unlocked functions exist explicitly. */
12438 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12439 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12443 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12444 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12447 if (!init_target_chars ())
12450 if (strcmp (fmt_str, target_percent_s) == 0
12451 || strchr (fmt_str, target_percent) == NULL)
12455 if (strcmp (fmt_str, target_percent_s) == 0)
12457 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12460 if (!arg || !validate_arg (arg, POINTER_TYPE))
12463 str = c_getstr (arg);
12469 /* The format specifier doesn't contain any '%' characters. */
12470 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12476 /* If the string was "", printf does nothing. */
12477 if (str[0] == '\0')
12478 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12480 /* If the string has length of 1, call putchar. */
12481 if (str[1] == '\0')
12483 /* Given printf("c"), (where c is any one character,)
12484 convert "c"[0] to an int and pass that to the replacement
12486 newarg = build_int_cst (NULL_TREE, str[0]);
12488 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12492 /* If the string was "string\n", call puts("string"). */
12493 size_t len = strlen (str);
12494 if ((unsigned char)str[len - 1] == target_newline)
12496 /* Create a NUL-terminated string that's one char shorter
12497 than the original, stripping off the trailing '\n'. */
12498 char *newstr = XALLOCAVEC (char, len);
12499 memcpy (newstr, str, len - 1);
12500 newstr[len - 1] = 0;
12502 newarg = build_string_literal (len, newstr);
12504 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12507 /* We'd like to arrange to call fputs(string,stdout) here,
12508 but we need stdout and don't have a way to get it yet. */
12513 /* The other optimizations can be done only on the non-va_list variants. */
12514 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12517 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12518 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12520 if (!arg || !validate_arg (arg, POINTER_TYPE))
12523 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12526 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12527 else if (strcmp (fmt_str, target_percent_c) == 0)
12529 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12532 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12538 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12541 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12542 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12543 more than 3 arguments, and ARG may be null in the 2-argument case.
12545 Return NULL_TREE if no simplification was possible, otherwise return the
12546 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12547 code of the function to be simplified. */
12550 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12551 tree fmt, tree arg, bool ignore,
12552 enum built_in_function fcode)
12554 tree fn_fputc, fn_fputs, call = NULL_TREE;
12555 const char *fmt_str = NULL;
12557 /* If the return value is used, don't do the transformation. */
12561 /* Verify the required arguments in the original call. */
12562 if (!validate_arg (fp, POINTER_TYPE))
12564 if (!validate_arg (fmt, POINTER_TYPE))
12567 /* Check whether the format is a literal string constant. */
12568 fmt_str = c_getstr (fmt);
12569 if (fmt_str == NULL)
12572 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12574 /* If we're using an unlocked function, assume the other
12575 unlocked functions exist explicitly. */
12576 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12577 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12581 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12582 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12585 if (!init_target_chars ())
12588 /* If the format doesn't contain % args or %%, use strcpy. */
12589 if (strchr (fmt_str, target_percent) == NULL)
12591 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12595 /* If the format specifier was "", fprintf does nothing. */
12596 if (fmt_str[0] == '\0')
12598 /* If FP has side-effects, just wait until gimplification is
12600 if (TREE_SIDE_EFFECTS (fp))
12603 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12606 /* When "string" doesn't contain %, replace all cases of
12607 fprintf (fp, string) with fputs (string, fp). The fputs
12608 builtin will take care of special cases like length == 1. */
12610 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12613 /* The other optimizations can be done only on the non-va_list variants. */
12614 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12617 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12618 else if (strcmp (fmt_str, target_percent_s) == 0)
12620 if (!arg || !validate_arg (arg, POINTER_TYPE))
12623 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12626 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12627 else if (strcmp (fmt_str, target_percent_c) == 0)
12629 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12632 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12637 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12640 /* Initialize format string characters in the target charset. */
12643 init_target_chars (void)
12648 target_newline = lang_hooks.to_target_charset ('\n');
12649 target_percent = lang_hooks.to_target_charset ('%');
12650 target_c = lang_hooks.to_target_charset ('c');
12651 target_s = lang_hooks.to_target_charset ('s');
12652 if (target_newline == 0 || target_percent == 0 || target_c == 0
12656 target_percent_c[0] = target_percent;
12657 target_percent_c[1] = target_c;
12658 target_percent_c[2] = '\0';
12660 target_percent_s[0] = target_percent;
12661 target_percent_s[1] = target_s;
12662 target_percent_s[2] = '\0';
12664 target_percent_s_newline[0] = target_percent;
12665 target_percent_s_newline[1] = target_s;
12666 target_percent_s_newline[2] = target_newline;
12667 target_percent_s_newline[3] = '\0';
12674 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12675 and no overflow/underflow occurred. INEXACT is true if M was not
12676 exactly calculated. TYPE is the tree type for the result. This
12677 function assumes that you cleared the MPFR flags and then
12678 calculated M to see if anything subsequently set a flag prior to
12679 entering this function. Return NULL_TREE if any checks fail. */
12682 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12684 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12685 overflow/underflow occurred. If -frounding-math, proceed iff the
12686 result of calling FUNC was exact. */
12687 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12688 && (!flag_rounding_math || !inexact))
12690 REAL_VALUE_TYPE rr;
12692 real_from_mpfr (&rr, m, type, GMP_RNDN);
12693 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12694 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12695 but the mpft_t is not, then we underflowed in the
12697 if (real_isfinite (&rr)
12698 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12700 REAL_VALUE_TYPE rmode;
12702 real_convert (&rmode, TYPE_MODE (type), &rr);
12703 /* Proceed iff the specified mode can hold the value. */
12704 if (real_identical (&rmode, &rr))
12705 return build_real (type, rmode);
12711 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12712 number and no overflow/underflow occurred. INEXACT is true if M
12713 was not exactly calculated. TYPE is the tree type for the result.
12714 This function assumes that you cleared the MPFR flags and then
12715 calculated M to see if anything subsequently set a flag prior to
12716 entering this function. Return NULL_TREE if any checks fail, if
12717 FORCE_CONVERT is true, then bypass the checks. */
12720 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12722 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12723 overflow/underflow occurred. If -frounding-math, proceed iff the
12724 result of calling FUNC was exact. */
12726 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12727 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12728 && (!flag_rounding_math || !inexact)))
12730 REAL_VALUE_TYPE re, im;
12732 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12733 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12734 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12735 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12736 but the mpft_t is not, then we underflowed in the
12739 || (real_isfinite (&re) && real_isfinite (&im)
12740 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12741 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12743 REAL_VALUE_TYPE re_mode, im_mode;
12745 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12746 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12747 /* Proceed iff the specified mode can hold the value. */
12749 || (real_identical (&re_mode, &re)
12750 && real_identical (&im_mode, &im)))
12751 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12752 build_real (TREE_TYPE (type), im_mode));
12758 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12759 FUNC on it and return the resulting value as a tree with type TYPE.
12760 If MIN and/or MAX are not NULL, then the supplied ARG must be
12761 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12762 acceptable values, otherwise they are not. The mpfr precision is
12763 set to the precision of TYPE. We assume that function FUNC returns
12764 zero if the result could be calculated exactly within the requested
12768 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12769 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12772 tree result = NULL_TREE;
12776 /* To proceed, MPFR must exactly represent the target floating point
12777 format, which only happens when the target base equals two. */
12778 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12779 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12781 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12783 if (real_isfinite (ra)
12784 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12785 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12787 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12788 const int prec = fmt->p;
12789 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12793 mpfr_init2 (m, prec);
12794 mpfr_from_real (m, ra, GMP_RNDN);
12795 mpfr_clear_flags ();
12796 inexact = func (m, m, rnd);
12797 result = do_mpfr_ckconv (m, type, inexact);
12805 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12806 FUNC on it and return the resulting value as a tree with type TYPE.
12807 The mpfr precision is set to the precision of TYPE. We assume that
12808 function FUNC returns zero if the result could be calculated
12809 exactly within the requested precision. */
12812 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12813 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12815 tree result = NULL_TREE;
12820 /* To proceed, MPFR must exactly represent the target floating point
12821 format, which only happens when the target base equals two. */
12822 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12823 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12824 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12826 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12827 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12829 if (real_isfinite (ra1) && real_isfinite (ra2))
12831 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12832 const int prec = fmt->p;
12833 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12837 mpfr_inits2 (prec, m1, m2, NULL);
12838 mpfr_from_real (m1, ra1, GMP_RNDN);
12839 mpfr_from_real (m2, ra2, GMP_RNDN);
12840 mpfr_clear_flags ();
12841 inexact = func (m1, m1, m2, rnd);
12842 result = do_mpfr_ckconv (m1, type, inexact);
12843 mpfr_clears (m1, m2, NULL);
12850 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12851 FUNC on it and return the resulting value as a tree with type TYPE.
12852 The mpfr precision is set to the precision of TYPE. We assume that
12853 function FUNC returns zero if the result could be calculated
12854 exactly within the requested precision. */
12857 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12858 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12860 tree result = NULL_TREE;
12866 /* To proceed, MPFR must exactly represent the target floating point
12867 format, which only happens when the target base equals two. */
12868 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12869 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12870 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12871 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12873 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12874 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12875 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12877 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12879 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12880 const int prec = fmt->p;
12881 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12885 mpfr_inits2 (prec, m1, m2, m3, NULL);
12886 mpfr_from_real (m1, ra1, GMP_RNDN);
12887 mpfr_from_real (m2, ra2, GMP_RNDN);
12888 mpfr_from_real (m3, ra3, GMP_RNDN);
12889 mpfr_clear_flags ();
12890 inexact = func (m1, m1, m2, m3, rnd);
12891 result = do_mpfr_ckconv (m1, type, inexact);
12892 mpfr_clears (m1, m2, m3, NULL);
12899 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12900 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12901 If ARG_SINP and ARG_COSP are NULL then the result is returned
12902 as a complex value.
12903 The type is taken from the type of ARG and is used for setting the
12904 precision of the calculation and results. */
12907 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12909 tree const type = TREE_TYPE (arg);
12910 tree result = NULL_TREE;
12914 /* To proceed, MPFR must exactly represent the target floating point
12915 format, which only happens when the target base equals two. */
12916 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12917 && TREE_CODE (arg) == REAL_CST
12918 && !TREE_OVERFLOW (arg))
12920 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12922 if (real_isfinite (ra))
12924 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12925 const int prec = fmt->p;
12926 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12927 tree result_s, result_c;
12931 mpfr_inits2 (prec, m, ms, mc, NULL);
12932 mpfr_from_real (m, ra, GMP_RNDN);
12933 mpfr_clear_flags ();
12934 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12935 result_s = do_mpfr_ckconv (ms, type, inexact);
12936 result_c = do_mpfr_ckconv (mc, type, inexact);
12937 mpfr_clears (m, ms, mc, NULL);
12938 if (result_s && result_c)
12940 /* If we are to return in a complex value do so. */
12941 if (!arg_sinp && !arg_cosp)
12942 return build_complex (build_complex_type (type),
12943 result_c, result_s);
12945 /* Dereference the sin/cos pointer arguments. */
12946 arg_sinp = build_fold_indirect_ref (arg_sinp);
12947 arg_cosp = build_fold_indirect_ref (arg_cosp);
12948 /* Proceed if valid pointer type were passed in. */
12949 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12950 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12952 /* Set the values. */
12953 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12955 TREE_SIDE_EFFECTS (result_s) = 1;
12956 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12958 TREE_SIDE_EFFECTS (result_c) = 1;
12959 /* Combine the assignments into a compound expr. */
12960 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12961 result_s, result_c));
12969 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12970 two-argument mpfr order N Bessel function FUNC on them and return
12971 the resulting value as a tree with type TYPE. The mpfr precision
12972 is set to the precision of TYPE. We assume that function FUNC
12973 returns zero if the result could be calculated exactly within the
12974 requested precision. */
12976 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12977 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12978 const REAL_VALUE_TYPE *min, bool inclusive)
12980 tree result = NULL_TREE;
12985 /* To proceed, MPFR must exactly represent the target floating point
12986 format, which only happens when the target base equals two. */
12987 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12988 && host_integerp (arg1, 0)
12989 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12991 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12992 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12995 && real_isfinite (ra)
12996 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12998 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12999 const int prec = fmt->p;
13000 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13004 mpfr_init2 (m, prec);
13005 mpfr_from_real (m, ra, GMP_RNDN);
13006 mpfr_clear_flags ();
13007 inexact = func (m, n, m, rnd);
13008 result = do_mpfr_ckconv (m, type, inexact);
13016 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13017 the pointer *(ARG_QUO) and return the result. The type is taken
13018 from the type of ARG0 and is used for setting the precision of the
13019 calculation and results. */
13022 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13024 tree const type = TREE_TYPE (arg0);
13025 tree result = NULL_TREE;
13030 /* To proceed, MPFR must exactly represent the target floating point
13031 format, which only happens when the target base equals two. */
13032 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13033 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13034 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13036 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13037 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13039 if (real_isfinite (ra0) && real_isfinite (ra1))
13041 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13042 const int prec = fmt->p;
13043 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13048 mpfr_inits2 (prec, m0, m1, NULL);
13049 mpfr_from_real (m0, ra0, GMP_RNDN);
13050 mpfr_from_real (m1, ra1, GMP_RNDN);
13051 mpfr_clear_flags ();
13052 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13053 /* Remquo is independent of the rounding mode, so pass
13054 inexact=0 to do_mpfr_ckconv(). */
13055 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13056 mpfr_clears (m0, m1, NULL);
13059 /* MPFR calculates quo in the host's long so it may
13060 return more bits in quo than the target int can hold
13061 if sizeof(host long) > sizeof(target int). This can
13062 happen even for native compilers in LP64 mode. In
13063 these cases, modulo the quo value with the largest
13064 number that the target int can hold while leaving one
13065 bit for the sign. */
13066 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13067 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13069 /* Dereference the quo pointer argument. */
13070 arg_quo = build_fold_indirect_ref (arg_quo);
13071 /* Proceed iff a valid pointer type was passed in. */
13072 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13074 /* Set the value. */
13075 tree result_quo = fold_build2 (MODIFY_EXPR,
13076 TREE_TYPE (arg_quo), arg_quo,
13077 build_int_cst (NULL, integer_quo));
13078 TREE_SIDE_EFFECTS (result_quo) = 1;
13079 /* Combine the quo assignment with the rem. */
13080 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13081 result_quo, result_rem));
13089 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13090 resulting value as a tree with type TYPE. The mpfr precision is
13091 set to the precision of TYPE. We assume that this mpfr function
13092 returns zero if the result could be calculated exactly within the
13093 requested precision. In addition, the integer pointer represented
13094 by ARG_SG will be dereferenced and set to the appropriate signgam
13098 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13100 tree result = NULL_TREE;
13104 /* To proceed, MPFR must exactly represent the target floating point
13105 format, which only happens when the target base equals two. Also
13106 verify ARG is a constant and that ARG_SG is an int pointer. */
13107 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13108 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13109 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13110 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13112 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13114 /* In addition to NaN and Inf, the argument cannot be zero or a
13115 negative integer. */
13116 if (real_isfinite (ra)
13117 && ra->cl != rvc_zero
13118 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13120 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13121 const int prec = fmt->p;
13122 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13127 mpfr_init2 (m, prec);
13128 mpfr_from_real (m, ra, GMP_RNDN);
13129 mpfr_clear_flags ();
13130 inexact = mpfr_lgamma (m, &sg, m, rnd);
13131 result_lg = do_mpfr_ckconv (m, type, inexact);
13137 /* Dereference the arg_sg pointer argument. */
13138 arg_sg = build_fold_indirect_ref (arg_sg);
13139 /* Assign the signgam value into *arg_sg. */
13140 result_sg = fold_build2 (MODIFY_EXPR,
13141 TREE_TYPE (arg_sg), arg_sg,
13142 build_int_cst (NULL, sg));
13143 TREE_SIDE_EFFECTS (result_sg) = 1;
13144 /* Combine the signgam assignment with the lgamma result. */
13145 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13146 result_sg, result_lg));
13154 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13155 function FUNC on it and return the resulting value as a tree with
13156 type TYPE. The mpfr precision is set to the precision of TYPE. We
13157 assume that function FUNC returns zero if the result could be
13158 calculated exactly within the requested precision. */
13161 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13163 tree result = NULL_TREE;
13167 /* To proceed, MPFR must exactly represent the target floating point
13168 format, which only happens when the target base equals two. */
13169 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13170 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13171 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13173 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13174 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13176 if (real_isfinite (re) && real_isfinite (im))
13178 const struct real_format *const fmt =
13179 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13180 const int prec = fmt->p;
13181 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13182 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13186 mpc_init2 (m, prec);
13187 mpfr_from_real (mpc_realref(m), re, rnd);
13188 mpfr_from_real (mpc_imagref(m), im, rnd);
13189 mpfr_clear_flags ();
13190 inexact = func (m, m, crnd);
13191 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13199 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13200 mpc function FUNC on it and return the resulting value as a tree
13201 with type TYPE. The mpfr precision is set to the precision of
13202 TYPE. We assume that function FUNC returns zero if the result
13203 could be calculated exactly within the requested precision. If
13204 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13205 in the arguments and/or results. */
13208 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13209 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13211 tree result = NULL_TREE;
13216 /* To proceed, MPFR must exactly represent the target floating point
13217 format, which only happens when the target base equals two. */
13218 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13219 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13220 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13221 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13222 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13224 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13225 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13226 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13227 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13230 || (real_isfinite (re0) && real_isfinite (im0)
13231 && real_isfinite (re1) && real_isfinite (im1)))
13233 const struct real_format *const fmt =
13234 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13235 const int prec = fmt->p;
13236 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13237 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13241 mpc_init2 (m0, prec);
13242 mpc_init2 (m1, prec);
13243 mpfr_from_real (mpc_realref(m0), re0, rnd);
13244 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13245 mpfr_from_real (mpc_realref(m1), re1, rnd);
13246 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13247 mpfr_clear_flags ();
13248 inexact = func (m0, m0, m1, crnd);
13249 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13259 The functions below provide an alternate interface for folding
13260 builtin function calls presented as GIMPLE_CALL statements rather
13261 than as CALL_EXPRs. The folded result is still expressed as a
13262 tree. There is too much code duplication in the handling of
13263 varargs functions, and a more intrusive re-factoring would permit
13264 better sharing of code between the tree and statement-based
13265 versions of these functions. */
13267 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13268 along with N new arguments specified as the "..." parameters. SKIP
13269 is the number of arguments in STMT to be omitted. This function is used
13270 to do varargs-to-varargs transformations. */
13273 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13275 int oldnargs = gimple_call_num_args (stmt);
13276 int nargs = oldnargs - skip + n;
13277 tree fntype = TREE_TYPE (fndecl);
13278 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13282 location_t loc = gimple_location (stmt);
13284 buffer = XALLOCAVEC (tree, nargs);
13286 for (i = 0; i < n; i++)
13287 buffer[i] = va_arg (ap, tree);
13289 for (j = skip; j < oldnargs; j++, i++)
13290 buffer[i] = gimple_call_arg (stmt, j);
13292 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13295 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13296 a normal call should be emitted rather than expanding the function
13297 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13300 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13302 tree dest, size, len, fn, fmt, flag;
13303 const char *fmt_str;
13304 int nargs = gimple_call_num_args (stmt);
13306 /* Verify the required arguments in the original call. */
13309 dest = gimple_call_arg (stmt, 0);
13310 if (!validate_arg (dest, POINTER_TYPE))
13312 flag = gimple_call_arg (stmt, 1);
13313 if (!validate_arg (flag, INTEGER_TYPE))
13315 size = gimple_call_arg (stmt, 2);
13316 if (!validate_arg (size, INTEGER_TYPE))
13318 fmt = gimple_call_arg (stmt, 3);
13319 if (!validate_arg (fmt, POINTER_TYPE))
13322 if (! host_integerp (size, 1))
13327 if (!init_target_chars ())
13330 /* Check whether the format is a literal string constant. */
13331 fmt_str = c_getstr (fmt);
13332 if (fmt_str != NULL)
13334 /* If the format doesn't contain % args or %%, we know the size. */
13335 if (strchr (fmt_str, target_percent) == 0)
13337 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13338 len = build_int_cstu (size_type_node, strlen (fmt_str));
13340 /* If the format is "%s" and first ... argument is a string literal,
13341 we know the size too. */
13342 else if (fcode == BUILT_IN_SPRINTF_CHK
13343 && strcmp (fmt_str, target_percent_s) == 0)
13349 arg = gimple_call_arg (stmt, 4);
13350 if (validate_arg (arg, POINTER_TYPE))
13352 len = c_strlen (arg, 1);
13353 if (! len || ! host_integerp (len, 1))
13360 if (! integer_all_onesp (size))
13362 if (! len || ! tree_int_cst_lt (len, size))
13366 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13367 or if format doesn't contain % chars or is "%s". */
13368 if (! integer_zerop (flag))
13370 if (fmt_str == NULL)
13372 if (strchr (fmt_str, target_percent) != NULL
13373 && strcmp (fmt_str, target_percent_s))
13377 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13378 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13379 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13383 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13386 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13387 a normal call should be emitted rather than expanding the function
13388 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13389 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13390 passed as second argument. */
13393 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13394 enum built_in_function fcode)
13396 tree dest, size, len, fn, fmt, flag;
13397 const char *fmt_str;
13399 /* Verify the required arguments in the original call. */
13400 if (gimple_call_num_args (stmt) < 5)
13402 dest = gimple_call_arg (stmt, 0);
13403 if (!validate_arg (dest, POINTER_TYPE))
13405 len = gimple_call_arg (stmt, 1);
13406 if (!validate_arg (len, INTEGER_TYPE))
13408 flag = gimple_call_arg (stmt, 2);
13409 if (!validate_arg (flag, INTEGER_TYPE))
13411 size = gimple_call_arg (stmt, 3);
13412 if (!validate_arg (size, INTEGER_TYPE))
13414 fmt = gimple_call_arg (stmt, 4);
13415 if (!validate_arg (fmt, POINTER_TYPE))
13418 if (! host_integerp (size, 1))
13421 if (! integer_all_onesp (size))
13423 if (! host_integerp (len, 1))
13425 /* If LEN is not constant, try MAXLEN too.
13426 For MAXLEN only allow optimizing into non-_ocs function
13427 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13428 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13434 if (tree_int_cst_lt (size, maxlen))
13438 if (!init_target_chars ())
13441 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13442 or if format doesn't contain % chars or is "%s". */
13443 if (! integer_zerop (flag))
13445 fmt_str = c_getstr (fmt);
13446 if (fmt_str == NULL)
13448 if (strchr (fmt_str, target_percent) != NULL
13449 && strcmp (fmt_str, target_percent_s))
13453 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13455 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13456 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13460 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13463 /* Builtins with folding operations that operate on "..." arguments
13464 need special handling; we need to store the arguments in a convenient
13465 data structure before attempting any folding. Fortunately there are
13466 only a few builtins that fall into this category. FNDECL is the
13467 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13468 result of the function call is ignored. */
13471 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13472 bool ignore ATTRIBUTE_UNUSED)
13474 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13475 tree ret = NULL_TREE;
13479 case BUILT_IN_SPRINTF_CHK:
13480 case BUILT_IN_VSPRINTF_CHK:
13481 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13484 case BUILT_IN_SNPRINTF_CHK:
13485 case BUILT_IN_VSNPRINTF_CHK:
13486 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13493 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13494 TREE_NO_WARNING (ret) = 1;
13500 /* A wrapper function for builtin folding that prevents warnings for
13501 "statement without effect" and the like, caused by removing the
13502 call node earlier than the warning is generated. */
13505 fold_call_stmt (gimple stmt, bool ignore)
13507 tree ret = NULL_TREE;
13508 tree fndecl = gimple_call_fndecl (stmt);
13509 location_t loc = gimple_location (stmt);
13511 && TREE_CODE (fndecl) == FUNCTION_DECL
13512 && DECL_BUILT_IN (fndecl)
13513 && !gimple_call_va_arg_pack_p (stmt))
13515 int nargs = gimple_call_num_args (stmt);
13517 if (avoid_folding_inline_builtin (fndecl))
13519 /* FIXME: Don't use a list in this interface. */
13520 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13522 tree arglist = NULL_TREE;
13524 for (i = nargs - 1; i >= 0; i--)
13525 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13526 return targetm.fold_builtin (fndecl, arglist, ignore);
13530 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13532 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13534 for (i = 0; i < nargs; i++)
13535 args[i] = gimple_call_arg (stmt, i);
13536 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13539 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13542 /* Propagate location information from original call to
13543 expansion of builtin. Otherwise things like
13544 maybe_emit_chk_warning, that operate on the expansion
13545 of a builtin, will use the wrong location information. */
13546 if (gimple_has_location (stmt))
13548 tree realret = ret;
13549 if (TREE_CODE (ret) == NOP_EXPR)
13550 realret = TREE_OPERAND (ret, 0);
13551 if (CAN_HAVE_LOCATION_P (realret)
13552 && !EXPR_HAS_LOCATION (realret))
13553 SET_EXPR_LOCATION (realret, loc);
13563 /* Look up the function in built_in_decls that corresponds to DECL
13564 and set ASMSPEC as its user assembler name. DECL must be a
13565 function decl that declares a builtin. */
13568 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13571 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13572 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13575 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13576 set_user_assembler_name (builtin, asmspec);
13577 switch (DECL_FUNCTION_CODE (decl))
13579 case BUILT_IN_MEMCPY:
13580 init_block_move_fn (asmspec);
13581 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13583 case BUILT_IN_MEMSET:
13584 init_block_clear_fn (asmspec);
13585 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13587 case BUILT_IN_MEMMOVE:
13588 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13590 case BUILT_IN_MEMCMP:
13591 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13593 case BUILT_IN_ABORT:
13594 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);