1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
68 const char * built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_args_info (tree);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
228 /* Return true if NAME starts with __builtin_ or __sync_. */
/* is_builtin_name: test whether NAME carries one of the reserved
   built-in prefixes "__builtin_" (10 chars) or "__sync_" (7 chars).
   NOTE(review): this is a line-numbered listing with dropped lines
   (numbers jump 231 -> 233 -> 235); the return type, braces and
   return statements are not visible here — verify against the
   complete file before editing.  */
231 is_builtin_name (const char *name)
233 if (strncmp (name, "__builtin_", 10) == 0)
235 if (strncmp (name, "__sync_", 7) == 0)
241 /* Return true if DECL is a function symbol representing a built-in. */
/* is_builtin_fn: DECL represents a built-in iff it is a FUNCTION_DECL
   whose DECL_BUILT_IN flag is set.
   NOTE(review): the return-type line and braces are missing from this
   listing (line numbers jump 244 -> 246).  */
244 is_builtin_fn (tree decl)
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
/* called_as_built_in: true when NODE's source-level name (DECL_NAME)
   has a built-in prefix; delegates the prefix test to is_builtin_name.
   NOTE(review): lines are missing from this listing (numbers 256 and
   259 absent), including the return type and the tail of the comment
   below.  */
255 called_as_built_in (tree node)
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
260 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
261 return is_builtin_name (name);
264 /* Return the alignment in bits of EXP, an object.
265 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
266 guessed alignment e.g. from type alignment. */
/* get_object_alignment: compute the alignment in bits of object EXP,
   starting from the guessed ALIGN and never exceeding MAX_ALIGN.
   Walks component references via get_inner_reference, tightening the
   bound ("inner") from the bit position and any variable offsets,
   then combines with the DECL/constant/type alignment of the base.
   NOTE(review): many lines are missing from this listing (embedded
   numbers jump, e.g. 284 -> 289, 327 -> 329), so loop structure,
   braces and several statements are not visible; comments below are
   limited to what is shown.  */
269 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
274 if (handled_component_p (exp))
276 HOST_WIDE_INT bitsize, bitpos;
278 enum machine_mode mode;
279 int unsignedp, volatilep;
281 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
282 &mode, &unsignedp, &volatilep, true);
/* bitpos & -bitpos isolates the lowest set bit: the largest power of
   two dividing the bit offset bounds the achievable alignment.  */
284 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
289 if (TREE_CODE (offset) == PLUS_EXPR)
291 next_offset = TREE_OPERAND (offset, 0);
292 offset = TREE_OPERAND (offset, 1);
296 if (host_integerp (offset, 1))
298 /* Any overflow in calculating offset_bits won't change
301 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
304 inner = MIN (inner, (offset_bits & -offset_bits));
306 else if (TREE_CODE (offset) == MULT_EXPR
307 && host_integerp (TREE_OPERAND (offset, 1), 1))
309 /* Any overflow in calculating offset_factor won't change
311 unsigned offset_factor
312 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
316 inner = MIN (inner, (offset_factor & -offset_factor));
/* A fully variable offset: assume nothing beyond byte alignment.  */
320 inner = MIN (inner, BITS_PER_UNIT);
323 offset = next_offset;
/* A CONST_DECL stands for its initializer when taking alignment.  */
326 if (TREE_CODE (exp) == CONST_DECL)
327 exp = DECL_INITIAL (exp);
329 && TREE_CODE (exp) != LABEL_DECL)
330 align = MIN (inner, DECL_ALIGN (exp));
331 #ifdef CONSTANT_ALIGNMENT
332 else if (CONSTANT_CLASS_P (exp))
333 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
335 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
336 || TREE_CODE (exp) == INDIRECT_REF)
337 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
339 align = MIN (align, inner);
340 return MIN (align, max_align);
343 /* Returns true iff we can trust that alignment information has been
344 calculated properly. */
/* can_trust_pointer_alignment: alignment info is only reliable when
   optimizing with tree temporary-expression replacement (TER) enabled.
   NOTE(review): return type line is missing from this listing.  */
347 can_trust_pointer_alignment (void)
349 /* We rely on TER to compute accurate alignment information. */
350 return (optimize && flag_tree_ter);
353 /* Return the alignment in bits of EXP, a pointer valued expression.
354 But don't return more than MAX_ALIGN no matter what.
355 The alignment returned is, by default, the alignment of the thing that
356 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
358 Otherwise, look at the expression to see if we can do better, i.e., if the
359 expression is actually pointing at an object whose alignment is tighter. */
/* get_pointer_alignment: alignment in bits of the object EXP points
   to, capped at MAX_ALIGN; starts from the pointed-to type's alignment
   and walks conversions and POINTER_PLUS_EXPRs to refine it, finally
   delegating to get_object_alignment for an ADDR_EXPR operand.
   NOTE(review): lines are missing throughout (case labels, returns,
   the loop around the switch); the early-exit paths after the
   can_trust_pointer_alignment and POINTER_TYPE_P checks are not
   visible here.  */
362 get_pointer_alignment (tree exp, unsigned int max_align)
364 unsigned int align, inner;
366 if (!can_trust_pointer_alignment ())
369 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
372 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
373 align = MIN (align, max_align);
377 switch (TREE_CODE (exp))
/* Presumably a conversion case (label missing from listing): step
   through the cast and re-derive alignment from the new type.  */
380 exp = TREE_OPERAND (exp, 0);
381 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
384 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
385 align = MIN (inner, max_align);
388 case POINTER_PLUS_EXPR:
389 /* If sum of pointer + int, restrict our maximum alignment to that
390 imposed by the integer. If not, we can't do any better than
392 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
395 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
396 & (max_align / BITS_PER_UNIT - 1))
400 exp = TREE_OPERAND (exp, 0);
404 /* See what we are pointing at and look at its alignment. */
405 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
413 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
414 way, because it could contain a zero byte in the middle.
415 TREE_STRING_LENGTH is the size of the character array, not the string.
417 ONLY_VALUE should be nonzero if the result is not going to be emitted
418 into the instruction stream and zero if it is going to be expanded.
419 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
420 is returned, otherwise NULL, since
421 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
422 evaluate the side-effects.
424 The value returned is of type `ssizetype'.
426 Unfortunately, string_constant can't access the values of const char
427 arrays with initializers, so neither can we do so here. */
/* c_strlen: compile-time strlen of SRC when SRC folds to a string
   constant (possibly plus a constant offset); result is of type
   ssizetype.  ONLY_VALUE nonzero permits folding through expressions
   whose side effects would otherwise have to be preserved.
   NOTE(review): this listing drops lines (e.g. 434-437, 440-442,
   446-448, 454-456), so the declarations of len1/len2/offset_node/
   ptr/max/i, several returns and all braces are missing; read the
   complete file before modifying.  */
430 c_strlen (tree src, int only_value)
433 HOST_WIDE_INT offset;
/* Fold a conditional whose arms have equal known lengths.  */
438 if (TREE_CODE (src) == COND_EXPR
439 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
443 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
444 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
445 if (tree_int_cst_equal (len1, len2))
/* (e1, e2) has the length of e2 when e1's effects may be dropped.  */
449 if (TREE_CODE (src) == COMPOUND_EXPR
450 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
451 return c_strlen (TREE_OPERAND (src, 1), only_value);
453 src = string_constant (src, &offset_node);
457 max = TREE_STRING_LENGTH (src) - 1;
458 ptr = TREE_STRING_POINTER (src);
460 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
462 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
463 compute the offset to the following null if we don't know where to
464 start searching for it. */
467 for (i = 0; i < max; i++)
471 /* We don't know the starting offset, but we do know that the string
472 has no internal zero bytes. We can assume that the offset falls
473 within the bounds of the string; otherwise, the programmer deserves
474 what he gets. Subtract the offset from the length of the string,
475 and return that. This would perhaps not be valid if we were dealing
476 with named arrays in addition to literal string constants. */
478 return size_diffop_loc (input_location, size_int (max), offset_node);
481 /* We have a known offset into the string. Start searching there for
482 a null character if we can represent it as a single HOST_WIDE_INT. */
483 if (offset_node == 0)
485 else if (! host_integerp (offset_node, 0))
488 offset = tree_low_cst (offset_node, 0);
490 /* If the offset is known to be out of bounds, warn, and call strlen at
492 if (offset < 0 || offset > max)
494 /* Suppress multiple warnings for propagated constant strings. */
495 if (! TREE_NO_WARNING (src))
497 warning (0, "offset outside bounds of constant string");
498 TREE_NO_WARNING (src) = 1;
503 /* Use strlen to search for the first zero byte. Since any strings
504 constructed with build_string will have nulls appended, we win even
505 if we get handed something like (char[4])"abcd".
507 Since OFFSET is our starting index into the string, no further
508 calculation is needed. */
509 return ssize_int (strlen (ptr + offset));
512 /* Return a char pointer for a C string if it is a string constant
513 or sum of string constant and integer constant. */
/* Body of c_getstr (per the forward declaration `static const char
   *c_getstr (tree)` above): return a host char pointer into SRC's
   string constant, adjusted by a constant offset, or fail when the
   offset is non-constant or out of range.
   NOTE(review): the function's signature line and several body lines
   (521-523, 528-529), including the failure returns, are missing from
   this listing.  */
520 src = string_constant (src, &offset_node);
524 if (offset_node == 0)
525 return TREE_STRING_POINTER (src);
526 else if (!host_integerp (offset_node, 1)
527 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
530 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
533 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
534 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
/* c_readstr: build a CONST_INT/CONST_DOUBLE by reading
   GET_MODE_BITSIZE (MODE) bits from STR, honoring the target's byte
   and word endianness.  MODE must be an integer mode (asserted).
   NOTE(review): the declarations of i, j, ch and the accumulator
   array c[] (zeroing included) are on lines missing from this
   listing, as are braces and the initial j = i assignment.  */
537 c_readstr (const char *str, enum machine_mode mode)
543 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
548 for (i = 0; i < GET_MODE_SIZE (mode); i++)
/* Map host byte index i to target bit position j, flipping for
   big-endian word and byte orders.  */
551 if (WORDS_BIG_ENDIAN)
552 j = GET_MODE_SIZE (mode) - i - 1;
553 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
554 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
555 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
557 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
560 ch = (unsigned char) str[i];
561 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
563 return immed_double_const (c[0], c[1], mode);
566 /* Cast a target constant CST to target CHAR and if that value fits into
567 host char type, return zero and put that value into variable pointed to by
/* target_char_cast: cast target constant CST to a target char; when
   the value also fits in a host char, store it through P and (per the
   comment above) return zero.
   NOTE(review): lines are missing from this listing — the computation
   of hostval from val, the final comparison/store through P, and the
   return statements are not visible.  */
571 target_char_cast (tree cst, char *p)
573 unsigned HOST_WIDE_INT val, hostval;
575 if (!host_integerp (cst, 1)
576 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
579 val = tree_low_cst (cst, 1);
/* Mask to the target char width when narrower than HOST_WIDE_INT.  */
580 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
581 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
/* Likewise mask to the host char width.  */
584 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
585 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
594 /* Similar to save_expr, but assumes that arbitrary code is not executed
595 in between the multiple evaluations. In particular, we assume that a
596 non-addressable local variable will not be modified. */
/* builtin_save_expr: like save_expr, but a non-addressable PARM_DECL
   or non-static local VAR_DECL is presumed unmodified between
   evaluations (the early return for that case is on a line missing
   from this listing, presumably returning EXP unchanged — verify).  */
599 builtin_save_expr (tree exp)
601 if (TREE_ADDRESSABLE (exp) == 0
602 && (TREE_CODE (exp) == PARM_DECL
603 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
606 return save_expr (exp);
609 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
610 times to get the address of either a higher stack frame, or a return
611 address located within it (depending on FNDECL_CODE). */
/* expand_builtin_return_addr: walk COUNT frames up the dynamic chain
   and produce the RTX for either the frame address or the return
   address, depending on FNDECL_CODE (BUILT_IN_FRAME_ADDRESS vs
   BUILT_IN_RETURN_ADDRESS).  Target macros (INITIAL_FRAME_ADDRESS_RTX,
   SETUP_FRAME_ADDRESSES, DYNAMIC_CHAIN_ADDRESS, FRAME_ADDR_RTX,
   RETURN_ADDR_RTX, RETURN_ADDR_IN_PREVIOUS_FRAME) customize each step.
   NOTE(review): this listing drops lines throughout — #else/#endif
   directives, braces, the count adjustment under
   RETURN_ADDR_IN_PREVIOUS_FRAME, and the final return are all
   missing.  */
614 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
618 #ifdef INITIAL_FRAME_ADDRESS_RTX
619 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
623 /* For a zero count with __builtin_return_address, we don't care what
624 frame address we return, because target-specific definitions will
625 override us. Therefore frame pointer elimination is OK, and using
626 the soft frame pointer is OK.
628 For a nonzero count, or a zero count with __builtin_frame_address,
629 we require a stable offset from the current frame pointer to the
630 previous one, so we must use the hard frame pointer, and
631 we must disable frame pointer elimination. */
632 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
633 tem = frame_pointer_rtx;
636 tem = hard_frame_pointer_rtx;
638 /* Tell reload not to eliminate the frame pointer. */
639 crtl->accesses_prior_frames = 1;
643 /* Some machines need special handling before we can access
644 arbitrary frames. For example, on the SPARC, we must first flush
645 all register windows to the stack. */
646 #ifdef SETUP_FRAME_ADDRESSES
648 SETUP_FRAME_ADDRESSES ();
651 /* On the SPARC, the return address is not in the frame, it is in a
652 register. There is no way to access it off of the current frame
653 pointer, but it can be accessed off the previous frame pointer by
654 reading the value from the register window save area. */
655 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
656 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
660 /* Scan back COUNT frames to the specified frame. */
661 for (i = 0; i < count; i++)
663 /* Assume the dynamic chain pointer is in the word that the
664 frame address points to, unless otherwise specified. */
665 #ifdef DYNAMIC_CHAIN_ADDRESS
666 tem = DYNAMIC_CHAIN_ADDRESS (tem);
668 tem = memory_address (Pmode, tem);
669 tem = gen_frame_mem (Pmode, tem);
670 tem = copy_to_reg (tem);
673 /* For __builtin_frame_address, return what we've got. But, on
674 the SPARC for example, we may have to add a bias. */
675 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
676 #ifdef FRAME_ADDR_RTX
677 return FRAME_ADDR_RTX (tem);
682 /* For __builtin_return_address, get the return address from that frame. */
683 #ifdef RETURN_ADDR_RTX
684 tem = RETURN_ADDR_RTX (count, tem);
686 tem = memory_address (Pmode,
687 plus_constant (tem, GET_MODE_SIZE (Pmode)));
688 tem = gen_frame_mem (Pmode, tem);
693 /* Alias set used for setjmp buffer. */
694 static alias_set_type setjmp_alias_set = -1;
696 /* Construct the leading half of a __builtin_setjmp call. Control will
697 return to RECEIVER_LABEL. This is also called directly by the SJLJ
698 exception handling code. */
/* expand_builtin_setjmp_setup: emit the setup half of
   __builtin_setjmp.  The buffer at BUF_ADDR receives, in order:
   word 0 = frame value, word 1 = address of RECEIVER_LABEL,
   word 2.. = machine-dependent stack save area.  All buffer accesses
   use the dedicated setjmp alias set.  Also called directly by the
   SJLJ exception-handling code.
   NOTE(review): declarations of mem/stack_save, braces and #endif
   lines are missing from this listing; note the comma operator at
   the end of the second gen_rtx_MEM line (original line 722) — it
   chains into the following set_mem_alias_set call.  */
701 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
703 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* Lazily create the alias set shared by all setjmp-buffer accesses.  */
707 if (setjmp_alias_set == -1)
708 setjmp_alias_set = new_alias_set ();
710 buf_addr = convert_memory_address (Pmode, buf_addr);
712 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
714 /* We store the frame pointer and the address of receiver_label in
715 the buffer and use the rest of it for the stack save area, which
716 is machine-dependent. */
718 mem = gen_rtx_MEM (Pmode, buf_addr);
719 set_mem_alias_set (mem, setjmp_alias_set);
720 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
722 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
723 set_mem_alias_set (mem, setjmp_alias_set);
725 emit_move_insn (validize_mem (mem),
726 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
728 stack_save = gen_rtx_MEM (sa_mode,
729 plus_constant (buf_addr,
730 2 * GET_MODE_SIZE (Pmode)));
731 set_mem_alias_set (stack_save, setjmp_alias_set);
732 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
734 /* If there is further processing to do, do it. */
735 #ifdef HAVE_builtin_setjmp_setup
736 if (HAVE_builtin_setjmp_setup)
737 emit_insn (gen_builtin_setjmp_setup (buf_addr));
740 /* Tell optimize_save_area_alloca that extra work is going to
741 need to go on during alloca. */
742 cfun->calls_setjmp = 1;
744 /* We have a nonlocal label. */
745 cfun->has_nonlocal_label = 1;
748 /* Construct the trailing part of a __builtin_setjmp call. This is
749 also called directly by the SJLJ exception handling code. */
/* expand_builtin_setjmp_receiver: emit the landing-pad half of
   __builtin_setjmp — restore the frame pointer (and the argument
   pointer when it is a fixed register that reload cannot eliminate
   into the frame pointer), then fence with a blockage insn so the
   scheduler cannot reorder the restores.  Also called directly by
   the SJLJ exception-handling code.
   NOTE(review): #else/#endif lines, braces and the declarations of
   chain/i are missing from this listing.  */
752 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
756 /* Clobber the FP when we get here, so we have to make sure it's
757 marked as used by this function. */
758 emit_use (hard_frame_pointer_rtx);
760 /* Mark the static chain as clobbered here so life information
761 doesn't get messed up for it. */
762 chain = targetm.calls.static_chain (current_function_decl, true);
763 if (chain && REG_P (chain))
764 emit_clobber (chain);
766 /* Now put in the code to restore the frame pointer, and argument
767 pointer, if needed. */
768 #ifdef HAVE_nonlocal_goto
769 if (! HAVE_nonlocal_goto)
772 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
773 /* This might change the hard frame pointer in ways that aren't
774 apparent to early optimization passes, so force a clobber. */
775 emit_clobber (hard_frame_pointer_rtx);
778 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
779 if (fixed_regs[ARG_POINTER_REGNUM])
781 #ifdef ELIMINABLE_REGS
/* Only restore the arg pointer when no elimination to the hard frame
   pointer exists for it in the target's ELIMINABLE_REGS table.  */
783 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
785 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
786 if (elim_regs[i].from == ARG_POINTER_REGNUM
787 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
790 if (i == ARRAY_SIZE (elim_regs))
793 /* Now restore our arg pointer from the address at which it
794 was saved in our stack frame. */
795 emit_move_insn (crtl->args.internal_arg_pointer,
796 copy_to_reg (get_arg_pointer_save_area ()));
801 #ifdef HAVE_builtin_setjmp_receiver
802 if (HAVE_builtin_setjmp_receiver)
803 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
806 #ifdef HAVE_nonlocal_goto_receiver
807 if (HAVE_nonlocal_goto_receiver)
808 emit_insn (gen_nonlocal_goto_receiver ());
813 /* We must not allow the code we just generated to be reordered by
814 scheduling. Specifically, the update of the frame pointer must
815 happen immediately, not later. */
816 emit_insn (gen_blockage ());
819 /* __builtin_longjmp is passed a pointer to an array of five words (not
820 all will be used on all machines). It operates similarly to the C
821 library function of the same name, but is more efficient. Much of
822 the code below is copied from the handling of non-local gotos. */
/* expand_builtin_longjmp: expand __builtin_longjmp (BUF_ADDR, VALUE).
   VALUE must be const1_rtx (asserted) since that is what
   builtin_setjmp returns.  Prefers the target's builtin_longjmp or
   nonlocal_goto patterns; otherwise loads FP/label/SP from the
   buffer, restores them, and emits an indirect jump.  Finally the
   jump insn is tagged with REG_NON_LOCAL_GOTO for the CFG.
   NOTE(review): #else/#endif lines, braces, the JUMP_P test inside
   the backward scan, and the loop's terminating break are missing
   from this listing.  */
825 expand_builtin_longjmp (rtx buf_addr, rtx value)
827 rtx fp, lab, stack, insn, last;
828 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
830 /* DRAP is needed for stack realign if longjmp is expanded to current
832 if (SUPPORTS_STACK_ALIGNMENT)
833 crtl->need_drap = true;
835 if (setjmp_alias_set == -1)
836 setjmp_alias_set = new_alias_set ();
838 buf_addr = convert_memory_address (Pmode, buf_addr);
840 buf_addr = force_reg (Pmode, buf_addr);
842 /* We require that the user must pass a second argument of 1, because
843 that is what builtin_setjmp will return. */
844 gcc_assert (value == const1_rtx);
846 last = get_last_insn ();
847 #ifdef HAVE_builtin_longjmp
848 if (HAVE_builtin_longjmp)
849 emit_insn (gen_builtin_longjmp (buf_addr));
/* Generic path: buffer layout matches expand_builtin_setjmp_setup —
   word 0 = FP, word 1 = receiver label, word 2.. = stack save.  */
853 fp = gen_rtx_MEM (Pmode, buf_addr);
854 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
855 GET_MODE_SIZE (Pmode)));
857 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
858 2 * GET_MODE_SIZE (Pmode)));
859 set_mem_alias_set (fp, setjmp_alias_set);
860 set_mem_alias_set (lab, setjmp_alias_set);
861 set_mem_alias_set (stack, setjmp_alias_set);
863 /* Pick up FP, label, and SP from the block and jump. This code is
864 from expand_goto in stmt.c; see there for detailed comments. */
865 #ifdef HAVE_nonlocal_goto
866 if (HAVE_nonlocal_goto)
867 /* We have to pass a value to the nonlocal_goto pattern that will
868 get copied into the static_chain pointer, but it does not matter
869 what that value is, because builtin_setjmp does not use it. */
870 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
874 lab = copy_to_reg (lab);
876 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
877 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
879 emit_move_insn (hard_frame_pointer_rtx, fp);
880 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
882 emit_use (hard_frame_pointer_rtx);
883 emit_use (stack_pointer_rtx);
884 emit_indirect_jump (lab);
888 /* Search backwards and mark the jump insn as a non-local goto.
889 Note that this precludes the use of __builtin_longjmp to a
890 __builtin_setjmp target in the same function. However, we've
891 already cautioned the user that these functions are for
892 internal exception handling use only. */
893 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
895 gcc_assert (insn != last);
899 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
902 else if (CALL_P (insn))
907 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
908 and the address of the save area. */
/* expand_builtin_nonlocal_goto: expand a __builtin_nonlocal_goto call
   EXP (args: target label, address of the save area holding FP and
   SP).  Uses the target's nonlocal_goto pattern when available;
   otherwise restores FP/SP manually and emits an indirect jump,
   keeping a fixed GP/PIC register live across the jump.  The jump
   insn gets a REG_NON_LOCAL_GOTO note.
   NOTE(review): #else/#endif lines, braces, the early return when
   validate_arglist fails, the JUMP_P test in the backward scan and
   the final return are missing from this listing.  */
911 expand_builtin_nonlocal_goto (tree exp)
913 tree t_label, t_save_area;
914 rtx r_label, r_save_area, r_fp, r_sp, insn;
916 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
919 t_label = CALL_EXPR_ARG (exp, 0);
920 t_save_area = CALL_EXPR_ARG (exp, 1);
922 r_label = expand_normal (t_label);
923 r_label = convert_memory_address (Pmode, r_label);
924 r_save_area = expand_normal (t_save_area);
925 r_save_area = convert_memory_address (Pmode, r_save_area);
926 /* Copy the address of the save location to a register just in case it was based
927 on the frame pointer. */
928 r_save_area = copy_to_reg (r_save_area);
/* Save-area layout: word 0 = frame pointer, word 1 = stack pointer.  */
929 r_fp = gen_rtx_MEM (Pmode, r_save_area);
930 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
931 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
933 crtl->has_nonlocal_goto = 1;
935 #ifdef HAVE_nonlocal_goto
936 /* ??? We no longer need to pass the static chain value, afaik. */
937 if (HAVE_nonlocal_goto)
938 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
942 r_label = copy_to_reg (r_label);
944 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
945 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
947 /* Restore frame pointer for containing function.
948 This sets the actual hard register used for the frame pointer
949 to the location of the function's incoming static chain info.
950 The non-local goto handler will then adjust it to contain the
951 proper value and reload the argument pointer, if needed. */
952 emit_move_insn (hard_frame_pointer_rtx, r_fp);
953 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
955 /* USE of hard_frame_pointer_rtx added for consistency;
956 not clear if really needed. */
957 emit_use (hard_frame_pointer_rtx);
958 emit_use (stack_pointer_rtx);
960 /* If the architecture is using a GP register, we must
961 conservatively assume that the target function makes use of it.
962 The prologue of functions with nonlocal gotos must therefore
963 initialize the GP register to the appropriate value, and we
964 must then make sure that this value is live at the point
965 of the jump. (Note that this doesn't necessarily apply
966 to targets with a nonlocal_goto pattern; they are free
967 to implement it in their own way. Note also that this is
968 a no-op if the GP register is a global invariant.) */
969 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
970 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
971 emit_use (pic_offset_table_rtx);
973 emit_indirect_jump (r_label);
976 /* Search backwards to the jump insn and mark it as a
978 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
982 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
985 else if (CALL_P (insn))
992 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
993 (not all will be used on all machines) that was passed to __builtin_setjmp.
994 It updates the stack pointer in that block to correspond to the current
/* expand_builtin_update_setjmp_buf: refresh the stack-save slot
   (word 2 of the __builtin_setjmp buffer at BUF_ADDR) with the
   current stack pointer.  The save-area mode comes from the
   save_stack_nonlocal pattern when available, else from
   STACK_SAVEAREA_MODE, else defaults to Pmode.
   NOTE(review): lines are missing from this listing — the stack_save
   declaration, #else/#endif directives, and the conditional guard
   around gen_setjmp (presumably #ifdef HAVE_setjmp / if (HAVE_setjmp))
   are not visible.  */
998 expand_builtin_update_setjmp_buf (rtx buf_addr)
1000 enum machine_mode sa_mode = Pmode;
1004 #ifdef HAVE_save_stack_nonlocal
1005 if (HAVE_save_stack_nonlocal)
1006 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1008 #ifdef STACK_SAVEAREA_MODE
1009 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1013 = gen_rtx_MEM (sa_mode,
1016 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1020 emit_insn (gen_setjmp ());
1023 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1026 /* Expand a call to __builtin_prefetch. For a target that does not support
1027 data prefetch, evaluate the memory address argument in case it has side
/* NOTE(review): elided excerpt -- braces and some statements between the
   visible lines are missing from this view.  */
1031 expand_builtin_prefetch (tree exp)
1033 tree arg0, arg1, arg2;
/* The only mandatory argument is the address to prefetch.  */
1037 if (!validate_arglist (exp, POINTER_TYPE, 0))
1040 arg0 = CALL_EXPR_ARG (exp, 0);
1042 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1043 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1045 nargs = call_expr_nargs (exp);
1047 arg1 = CALL_EXPR_ARG (exp, 1);
1049 arg1 = integer_zero_node;
1051 arg2 = CALL_EXPR_ARG (exp, 2);
1053 arg2 = build_int_cst (NULL_TREE, 3);
1055 /* Argument 0 is an address. */
1056 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1058 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1059 if (TREE_CODE (arg1) != INTEGER_CST)
1061 error ("second argument to %<__builtin_prefetch%> must be a constant");
1062 arg1 = integer_zero_node;
1064 op1 = expand_normal (arg1);
1065 /* Argument 1 must be either zero or one. */
1066 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1068 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1073 /* Argument 2 (locality) must be a compile-time constant int. */
1074 if (TREE_CODE (arg2) != INTEGER_CST)
1076 error ("third argument to %<__builtin_prefetch%> must be a constant");
1077 arg2 = integer_zero_node;
1079 op2 = expand_normal (arg2);
1080 /* Argument 2 must be 0, 1, 2, or 3. */
1081 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1083 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
/* If the target has a prefetch insn, force the address into the operand
   mode/register form the insn predicate requires, then emit it.  */
1087 #ifdef HAVE_prefetch
1090 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1092 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1093 || (GET_MODE (op0) != Pmode))
1095 op0 = convert_memory_address (Pmode, op0);
1096 op0 = force_reg (Pmode, op0);
1098 emit_insn (gen_prefetch (op0, op1, op2));
1102 /* Don't do anything with direct references to volatile memory, but
1103 generate code to handle other side effects. */
1104 if (!MEM_P (op0) && side_effects_p (op0))
1108 /* Get a MEM rtx for expression EXP which is the address of an operand
1109 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1110 the maximum length of the block of memory that might be accessed or
/* NOTE(review): elided excerpt; declarations and some statements are
   missing, so only the visible logic is annotated.  */
1114 get_memory_rtx (tree exp, tree len)
1116 tree orig_exp = exp;
1120 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1121 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1122 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1123 exp = TREE_OPERAND (exp, 0);
/* Expand the (original) address and wrap it in a BLKmode MEM.  */
1125 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1126 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1128 /* Get an expression we can use to find the attributes to assign to MEM.
1129 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1130 we can. First remove any nops. */
1131 while (CONVERT_EXPR_P (exp)
1132 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1133 exp = TREE_OPERAND (exp, 0);
/* &object + positive constant: remember the byte offset in OFF and use
   the underlying object for the attributes.  */
1136 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1137 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1138 && host_integerp (TREE_OPERAND (exp, 1), 0)
1139 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1140 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1141 else if (TREE_CODE (exp) == ADDR_EXPR)
1142 exp = TREE_OPERAND (exp, 0);
1143 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1144 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1148 /* Honor attributes derived from exp, except for the alias set
1149 (as builtin stringops may alias with anything) and the size
1150 (as stringops may access multiple array elements). */
1153 set_mem_attributes (mem, exp, 0);
/* Fold any constant offset recorded above into MEM's attributes.  */
1156 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1158 /* Allow the string and memory builtins to overflow from one
1159 field into another, see http://gcc.gnu.org/PR23561.
1160 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1161 memory accessed by the string or memory builtin will fit
1162 within the field. */
1163 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1165 tree mem_expr = MEM_EXPR (mem);
1166 HOST_WIDE_INT offset = -1, length = -1;
/* Strip wrappers to reach the innermost COMPONENT_REF.  */
1169 while (TREE_CODE (inner) == ARRAY_REF
1170 || CONVERT_EXPR_P (inner)
1171 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1172 || TREE_CODE (inner) == SAVE_EXPR)
1173 inner = TREE_OPERAND (inner, 0);
1175 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
/* OFFSET/LENGTH stay -1 ("unknown") unless constants are available.  */
1177 if (MEM_OFFSET (mem)
1178 && CONST_INT_P (MEM_OFFSET (mem)))
1179 offset = INTVAL (MEM_OFFSET (mem));
1181 if (offset >= 0 && len && host_integerp (len, 0))
1182 length = tree_low_cst (len, 0);
1184 while (TREE_CODE (inner) == COMPONENT_REF)
1186 tree field = TREE_OPERAND (inner, 1);
1187 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1188 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1190 /* Bitfields are generally not byte-addressable. */
1191 gcc_assert (!DECL_BIT_FIELD (field)
1192 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1193 % BITS_PER_UNIT) == 0
1194 && host_integerp (DECL_SIZE (field), 0)
1195 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1196 % BITS_PER_UNIT) == 0));
1198 /* If we can prove that the memory starting at XEXP (mem, 0) and
1199 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1200 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1201 fields without DECL_SIZE_UNIT like flexible array members. */
1203 && DECL_SIZE_UNIT (field)
1204 && host_integerp (DECL_SIZE_UNIT (field), 0))
1207 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1210 && offset + length <= size)
/* Otherwise walk outward, accumulating this field's byte offset.  */
1215 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1216 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1217 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1225 mem_expr = TREE_OPERAND (mem_expr, 0);
1226 inner = TREE_OPERAND (inner, 0);
/* If no enclosing reference could be proven safe, the expr/offset
   attributes are dropped (MEM_EXPR set to the outer expr or cleared).  */
1229 if (mem_expr == NULL)
1231 if (mem_expr != MEM_EXPR (mem))
1233 set_mem_expr (mem, mem_expr);
1234 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
/* String builtins may alias anything and touch several elements, so
   clear the alias set and the recorded size unconditionally.  */
1237 set_mem_alias_set (mem, 0);
1238 set_mem_size (mem, NULL_RTX);
1244 /* Built-in functions to perform an untyped call and return. */
1246 /* For each register that may be used for calling a function, this
1247 gives a mode used to copy the register's value. VOIDmode indicates
1248 the register is not used for calling a function. If the machine
1249 has register windows, this gives only the outbound registers.
1250 INCOMING_REGNO gives the corresponding inbound register. */
/* Filled in (once) by apply_args_size below.  */
1251 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1253 /* For each register that may be used for returning values, this gives
1254 a mode used to copy the register's value. VOIDmode indicates the
1255 register is not used for returning values. If the machine has
1256 register windows, this gives only the outbound registers.
1257 INCOMING_REGNO gives the corresponding inbound register. */
/* Filled in (once) by apply_result_size below.  */
1258 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1260 /* Return the size required for the block returned by __builtin_apply_args,
1261 and initialize apply_args_mode. */
1264 apply_args_size (void)
/* SIZE is a function-local static: computed once, then reused.
   NOTE(review): the early-return guard comparing it to -1 is among the
   elided lines of this excerpt.  */
1266 static int size = -1;
1269 enum machine_mode mode;
1271 /* The values computed by this function never change. */
1274 /* The first value is the incoming arg-pointer. */
1275 size = GET_MODE_SIZE (Pmode);
1277 /* The second value is the structure value address unless this is
1278 passed as an "invisible" first argument. */
1279 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1280 size += GET_MODE_SIZE (Pmode);
/* Reserve a naturally-aligned slot for every register that can carry
   an argument, recording each register's copy mode as we go.  */
1282 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1283 if (FUNCTION_ARG_REGNO_P (regno))
1285 mode = reg_raw_mode[regno];
1287 gcc_assert (mode != VOIDmode);
1289 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1290 if (size % align != 0)
1291 size = CEIL (size, align) * align;
1292 size += GET_MODE_SIZE (mode);
1293 apply_args_mode[regno] = mode;
/* Registers never used for arguments are marked VOIDmode.  */
1297 apply_args_mode[regno] = VOIDmode;
1303 /* Return the size required for the block returned by __builtin_apply,
1304 and initialize apply_result_mode. */
1307 apply_result_size (void)
/* SIZE is computed once and cached in the function-local static.  */
1309 static int size = -1;
1311 enum machine_mode mode;
1313 /* The values computed by this function never change. */
/* Reserve a naturally-aligned slot for each possible value-return
   register, recording each register's copy mode.  */
1318 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1319 if (FUNCTION_VALUE_REGNO_P (regno))
1321 mode = reg_raw_mode[regno];
1323 gcc_assert (mode != VOIDmode);
1325 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1326 if (size % align != 0)
1327 size = CEIL (size, align) * align;
1328 size += GET_MODE_SIZE (mode);
1329 apply_result_mode[regno] = mode;
/* Registers never used for return values are marked VOIDmode.  */
1332 apply_result_mode[regno] = VOIDmode;
1334 /* Allow targets that use untyped_call and untyped_return to override
1335 the size so that machine-specific information can be stored here. */
1336 #ifdef APPLY_RESULT_SIZE
1337 size = APPLY_RESULT_SIZE;
1343 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1344 /* Create a vector describing the result block RESULT. If SAVEP is true,
1345 the result block is used to save the values; otherwise it is used to
1346 restore the values. */
1349 result_vector (int savep, rtx result)
1351 int regno, size, align, nelts;
1352 enum machine_mode mode;
1354 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
/* Walk the layout computed by apply_result_size: one SET per live
   return register -- mem <- reg when saving, reg <- mem when restoring.
   When restoring, map to the inbound register via INCOMING_REGNO.  */
1357 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1358 if ((mode = apply_result_mode[regno]) != VOIDmode)
1360 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1361 if (size % align != 0)
1362 size = CEIL (size, align) * align;
1363 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1364 mem = adjust_address (result, mode, size);
1365 savevec[nelts++] = (savep
1366 ? gen_rtx_SET (VOIDmode, mem, reg)
1367 : gen_rtx_SET (VOIDmode, reg, mem));
1368 size += GET_MODE_SIZE (mode);
/* Package all the SETs into a single PARALLEL.  */
1370 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec))
1372 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1374 /* Save the state required to perform an untyped call with the same
1375 arguments as were passed to the current function. */
1378 expand_builtin_apply_args_1 (void)
1381 int size, align, regno;
1382 enum machine_mode mode;
1383 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1385 /* Create a block where the arg-pointer, structure value address,
1386 and argument registers can be saved. */
1387 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1389 /* Walk past the arg-pointer and structure value address. */
1390 size = GET_MODE_SIZE (Pmode);
1391 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1392 size += GET_MODE_SIZE (Pmode);
1394 /* Save each register used in calling a function to the block. */
/* The layout (alignment and offsets) must match apply_args_size.  */
1395 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1396 if ((mode = apply_args_mode[regno]) != VOIDmode)
1398 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1399 if (size % align != 0)
1400 size = CEIL (size, align) * align;
1402 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1404 emit_move_insn (adjust_address (registers, mode, size), tem);
1405 size += GET_MODE_SIZE (mode);
1408 /* Save the arg pointer to the block. */
1409 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1410 #ifdef STACK_GROWS_DOWNWARD
1411 /* We need the pointer as the caller actually passed them to us, not
1412 as we might have pretended they were passed. Make sure it's a valid
1413 operand, as emit_move_insn isn't expected to handle a PLUS. */
1415 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1418 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
/* Reset SIZE to slot 1: the structure value address (if any) lives
   right after the arg pointer.  */
1420 size = GET_MODE_SIZE (Pmode);
1422 /* Save the structure value address unless this is passed as an
1423 "invisible" first argument. */
1424 if (struct_incoming_value)
1426 emit_move_insn (adjust_address (registers, Pmode, size),
1427 copy_to_reg (struct_incoming_value));
1428 size += GET_MODE_SIZE (Pmode);
1431 /* Return the address of the block. */
1432 return copy_addr_to_reg (XEXP (registers, 0));
1435 /* __builtin_apply_args returns block of memory allocated on
1436 the stack into which is stored the arg pointer, structure
1437 value address, static chain, and all the registers that might
1438 possibly be used in performing a function call. The code is
1439 moved to the start of the function so the incoming values are
1443 expand_builtin_apply_args (void)
1445 /* Don't do __builtin_apply_args more than once in a function.
1446 Save the result of the first call and reuse it. */
1447 if (apply_args_value != 0)
1448 return apply_args_value;
1450 /* When this function is called, it means that registers must be
1451 saved on entry to this function. So we migrate the
1452 call to the first insn of this function. */
/* Generate the save code in a detached sequence, then splice it into
   the function entry below.  */
1457 temp = expand_builtin_apply_args_1 ();
/* Cache the resulting address so later uses reuse the same block.  */
1461 apply_args_value = temp;
1463 /* Put the insns after the NOTE that starts the function.
1464 If this is inside a start_sequence, make the outer-level insn
1465 chain current, so the code is placed at the start of the
1466 function. If internal_arg_pointer is a non-virtual pseudo,
1467 it needs to be placed after the function that initializes
1469 push_topmost_sequence ();
1470 if (REG_P (crtl->args.internal_arg_pointer)
1471 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1472 emit_insn_before (seq, parm_birth_insn);
1474 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1475 pop_topmost_sequence ();
1480 /* Perform an untyped call and save the state required to perform an
1481 untyped return of whatever value was returned by the given function. */
/* NOTE(review): elided excerpt; #else/#endif lines and some statements
   between the visible lines are missing from this view.  */
1484 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1486 int size, align, regno;
1487 enum machine_mode mode;
1488 rtx incoming_args, result, reg, dest, src, call_insn;
1489 rtx old_stack_level = 0;
1490 rtx call_fusage = 0;
1491 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1493 arguments = convert_memory_address (Pmode, arguments);
1495 /* Create a block where the return registers can be saved. */
1496 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1498 /* Fetch the arg pointer from the ARGUMENTS block. */
1499 incoming_args = gen_reg_rtx (Pmode);
1500 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
/* On upward-growing stacks the saved pointer marks the far end, so
   back up by the argument block size.  */
1501 #ifndef STACK_GROWS_DOWNWARD
1502 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1503 incoming_args, 0, OPTAB_LIB_WIDEN);
1506 /* Push a new argument block and copy the arguments. Do not allow
1507 the (potential) memcpy call below to interfere with our stack
1509 do_pending_stack_adjust ();
1512 /* Save the stack with nonlocal if available. */
1513 #ifdef HAVE_save_stack_nonlocal
1514 if (HAVE_save_stack_nonlocal)
1515 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX)
1518 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1520 /* Allocate a block of memory onto the stack and copy the memory
1521 arguments to the outgoing arguments address. */
1522 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1524 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1525 may have already set current_function_calls_alloca to true.
1526 current_function_calls_alloca won't be set if argsize is zero,
1527 so we have to guarantee need_drap is true here. */
1528 if (SUPPORTS_STACK_ALIGNMENT)
1529 crtl->need_drap = true;
1531 dest = virtual_outgoing_args_rtx;
1532 #ifndef STACK_GROWS_DOWNWARD
1533 if (CONST_INT_P (argsize))
1534 dest = plus_constant (dest, -INTVAL (argsize));
1536 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
/* Copy the caller-saved argument block into the new outgoing area.  */
1538 dest = gen_rtx_MEM (BLKmode, dest);
1539 set_mem_align (dest, PARM_BOUNDARY);
1540 src = gen_rtx_MEM (BLKmode, incoming_args);
1541 set_mem_align (src, PARM_BOUNDARY);
1542 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1544 /* Refer to the argument block. */
1546 arguments = gen_rtx_MEM (BLKmode, arguments);
1547 set_mem_align (arguments, PARM_BOUNDARY);
1549 /* Walk past the arg-pointer and structure value address. */
1550 size = GET_MODE_SIZE (Pmode);
1552 size += GET_MODE_SIZE (Pmode);
1554 /* Restore each of the registers previously saved. Make USE insns
1555 for each of these registers for use in making the call. */
/* Offsets must mirror the layout written by expand_builtin_apply_args_1.  */
1556 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1557 if ((mode = apply_args_mode[regno]) != VOIDmode)
1559 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1560 if (size % align != 0)
1561 size = CEIL (size, align) * align;
1562 reg = gen_rtx_REG (mode, regno);
1563 emit_move_insn (reg, adjust_address (arguments, mode, size));
1564 use_reg (&call_fusage, reg);
1565 size += GET_MODE_SIZE (mode);
1568 /* Restore the structure value address unless this is passed as an
1569 "invisible" first argument. */
1570 size = GET_MODE_SIZE (Pmode);
1573 rtx value = gen_reg_rtx (Pmode);
1574 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1575 emit_move_insn (struct_value, value);
1576 if (REG_P (struct_value))
1577 use_reg (&call_fusage, struct_value);
1578 size += GET_MODE_SIZE (Pmode);
1581 /* All arguments and registers used for the call are set up by now! */
1582 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1584 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1585 and we don't want to load it into a register as an optimization,
1586 because prepare_call_address already did it if it should be done. */
1587 if (GET_CODE (function) != SYMBOL_REF)
1588 function = memory_address (FUNCTION_MODE, function);
1590 /* Generate the actual call instruction and save the return value. */
1591 #ifdef HAVE_untyped_call
1592 if (HAVE_untyped_call)
1593 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1594 result, result_vector (1, result)));
1597 #ifdef HAVE_call_value
1598 if (HAVE_call_value)
1602 /* Locate the unique return register. It is not possible to
1603 express a call that sets more than one return register using
1604 call_value; use untyped_call for that. In fact, untyped_call
1605 only needs to save the return registers in the given block. */
1606 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1607 if ((mode = apply_result_mode[regno]) != VOIDmode)
1609 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1611 valreg = gen_rtx_REG (mode, regno);
1614 emit_call_insn (GEN_CALL_VALUE (valreg,
1615 gen_rtx_MEM (FUNCTION_MODE, function),
1616 const0_rtx, NULL_RTX, const0_rtx));
1618 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1624 /* Find the CALL insn we just emitted, and attach the register usage
1626 call_insn = last_call_insn ();
1627 add_function_usage_to (call_insn, call_fusage);
1629 /* Restore the stack. */
1630 #ifdef HAVE_save_stack_nonlocal
1631 if (HAVE_save_stack_nonlocal)
1632 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1635 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1639 /* Return the address of the result block. */
1640 result = copy_addr_to_reg (XEXP (result, 0));
1641 return convert_memory_address (ptr_mode, result);
1644 /* Perform an untyped return. */
1647 expand_builtin_return (rtx result)
1649 int size, align, regno;
1650 enum machine_mode mode;
1652 rtx call_fusage = 0;
1654 result = convert_memory_address (Pmode, result);
/* Called for its side effect of initializing apply_result_mode.  */
1656 apply_result_size ();
1657 result = gen_rtx_MEM (BLKmode, result);
/* If the target provides an untyped_return pattern, let it restore the
   whole register set described by result_vector in one insn.  */
1659 #ifdef HAVE_untyped_return
1660 if (HAVE_untyped_return)
1662 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1668 /* Restore the return value and note that each value is used. */
/* Generic path: reload each saved return register from the block,
   using the same layout apply_result_size computed.  */
1670 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1671 if ((mode = apply_result_mode[regno]) != VOIDmode)
1673 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1674 if (size % align != 0)
1675 size = CEIL (size, align) * align;
1676 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1677 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate USE insns in a side sequence so they can be emitted
   together just before the return.  */
1679 push_to_sequence (call_fusage);
1681 call_fusage = get_insns ();
1683 size += GET_MODE_SIZE (mode);
1686 /* Put the USE insns before the return. */
1687 emit_insn (call_fusage);
1689 /* Return whatever values was restored by jumping directly to the end
1691 expand_naked_return ();
1694 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
/* Map a tree type code to the corresponding __builtin_classify_type
   type_class value.  NOTE(review): the UNION_TYPE case line (original
   line 1713) is missing from this elided excerpt.  */
1696 static enum type_class
1697 type_to_class (tree type)
1699 switch (TREE_CODE (type))
1701 case VOID_TYPE: return void_type_class;
1702 case INTEGER_TYPE: return integer_type_class;
1703 case ENUMERAL_TYPE: return enumeral_type_class;
1704 case BOOLEAN_TYPE: return boolean_type_class;
1705 case POINTER_TYPE: return pointer_type_class;
1706 case REFERENCE_TYPE: return reference_type_class;
1707 case OFFSET_TYPE: return offset_type_class;
1708 case REAL_TYPE: return real_type_class;
1709 case COMPLEX_TYPE: return complex_type_class;
1710 case FUNCTION_TYPE: return function_type_class;
1711 case METHOD_TYPE: return method_type_class;
1712 case RECORD_TYPE: return record_type_class;
1714 case QUAL_UNION_TYPE: return union_type_class;
/* Character arrays (TYPE_STRING_FLAG) classify as strings.  */
1715 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1716 ? string_type_class : array_type_class);
1717 case LANG_TYPE: return lang_type_class;
1718 default: return no_type_class;
1722 /* Expand a call EXP to __builtin_classify_type. */
1725 expand_builtin_classify_type (tree exp)
/* Classify the (static) type of the first argument; with no argument,
   the result is no_type_class.  */
1727 if (call_expr_nargs (exp))
1728 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1729 return GEN_INT (no_type_class);
1732 /* This helper macro, meant to be used in mathfn_built_in below,
1733 determines which among a set of three builtin math functions is
1734 appropriate for a given type mode. The `F' and `L' cases are
1735 automatically generated from the `double' case. */
/* Expands to three case labels and sets fcode/fcodef/fcodel for the
   double/float/long-double variants respectively.  */
1736 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1737 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1738 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1739 fcodel = BUILT_IN_MATHFN##L ; break;
1740 /* Similar to above, but appends _R after any F/L suffix. */
1741 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1742 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1743 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1744 fcodel = BUILT_IN_MATHFN##L_R ; break;
1746 /* Return mathematic function equivalent to FN but operating directly
1747 on TYPE, if available. If IMPLICIT is true find the function in
1748 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1749 can't do the conversion, return zero. */
1752 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1754 tree const *const fn_arr
1755 = implicit ? implicit_built_in_decls : built_in_decls;
1756 enum built_in_function fcode, fcodef, fcodel;
/* Each CASE_MATHFN expansion maps FN (in any of its three precisions)
   to the corresponding double/float/long-double code triple.  */
1760 CASE_MATHFN (BUILT_IN_ACOS)
1761 CASE_MATHFN (BUILT_IN_ACOSH)
1762 CASE_MATHFN (BUILT_IN_ASIN)
1763 CASE_MATHFN (BUILT_IN_ASINH)
1764 CASE_MATHFN (BUILT_IN_ATAN)
1765 CASE_MATHFN (BUILT_IN_ATAN2)
1766 CASE_MATHFN (BUILT_IN_ATANH)
1767 CASE_MATHFN (BUILT_IN_CBRT)
1768 CASE_MATHFN (BUILT_IN_CEIL)
1769 CASE_MATHFN (BUILT_IN_CEXPI)
1770 CASE_MATHFN (BUILT_IN_COPYSIGN)
1771 CASE_MATHFN (BUILT_IN_COS)
1772 CASE_MATHFN (BUILT_IN_COSH)
1773 CASE_MATHFN (BUILT_IN_DREM)
1774 CASE_MATHFN (BUILT_IN_ERF)
1775 CASE_MATHFN (BUILT_IN_ERFC)
1776 CASE_MATHFN (BUILT_IN_EXP)
1777 CASE_MATHFN (BUILT_IN_EXP10)
1778 CASE_MATHFN (BUILT_IN_EXP2)
1779 CASE_MATHFN (BUILT_IN_EXPM1)
1780 CASE_MATHFN (BUILT_IN_FABS)
1781 CASE_MATHFN (BUILT_IN_FDIM)
1782 CASE_MATHFN (BUILT_IN_FLOOR)
1783 CASE_MATHFN (BUILT_IN_FMA)
1784 CASE_MATHFN (BUILT_IN_FMAX)
1785 CASE_MATHFN (BUILT_IN_FMIN)
1786 CASE_MATHFN (BUILT_IN_FMOD)
1787 CASE_MATHFN (BUILT_IN_FREXP)
1788 CASE_MATHFN (BUILT_IN_GAMMA)
1789 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1790 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1791 CASE_MATHFN (BUILT_IN_HYPOT)
1792 CASE_MATHFN (BUILT_IN_ILOGB)
1793 CASE_MATHFN (BUILT_IN_INF)
1794 CASE_MATHFN (BUILT_IN_ISINF)
1795 CASE_MATHFN (BUILT_IN_J0)
1796 CASE_MATHFN (BUILT_IN_J1)
1797 CASE_MATHFN (BUILT_IN_JN)
1798 CASE_MATHFN (BUILT_IN_LCEIL)
1799 CASE_MATHFN (BUILT_IN_LDEXP)
1800 CASE_MATHFN (BUILT_IN_LFLOOR)
1801 CASE_MATHFN (BUILT_IN_LGAMMA)
1802 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1803 CASE_MATHFN (BUILT_IN_LLCEIL)
1804 CASE_MATHFN (BUILT_IN_LLFLOOR)
1805 CASE_MATHFN (BUILT_IN_LLRINT)
1806 CASE_MATHFN (BUILT_IN_LLROUND)
1807 CASE_MATHFN (BUILT_IN_LOG)
1808 CASE_MATHFN (BUILT_IN_LOG10)
1809 CASE_MATHFN (BUILT_IN_LOG1P)
1810 CASE_MATHFN (BUILT_IN_LOG2)
1811 CASE_MATHFN (BUILT_IN_LOGB)
1812 CASE_MATHFN (BUILT_IN_LRINT)
1813 CASE_MATHFN (BUILT_IN_LROUND)
1814 CASE_MATHFN (BUILT_IN_MODF)
1815 CASE_MATHFN (BUILT_IN_NAN)
1816 CASE_MATHFN (BUILT_IN_NANS)
1817 CASE_MATHFN (BUILT_IN_NEARBYINT)
1818 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1819 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1820 CASE_MATHFN (BUILT_IN_POW)
1821 CASE_MATHFN (BUILT_IN_POWI)
1822 CASE_MATHFN (BUILT_IN_POW10)
1823 CASE_MATHFN (BUILT_IN_REMAINDER)
1824 CASE_MATHFN (BUILT_IN_REMQUO)
1825 CASE_MATHFN (BUILT_IN_RINT)
1826 CASE_MATHFN (BUILT_IN_ROUND)
1827 CASE_MATHFN (BUILT_IN_SCALB)
1828 CASE_MATHFN (BUILT_IN_SCALBLN)
1829 CASE_MATHFN (BUILT_IN_SCALBN)
1830 CASE_MATHFN (BUILT_IN_SIGNBIT)
1831 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1832 CASE_MATHFN (BUILT_IN_SIN)
1833 CASE_MATHFN (BUILT_IN_SINCOS)
1834 CASE_MATHFN (BUILT_IN_SINH)
1835 CASE_MATHFN (BUILT_IN_SQRT)
1836 CASE_MATHFN (BUILT_IN_TAN)
1837 CASE_MATHFN (BUILT_IN_TANH)
1838 CASE_MATHFN (BUILT_IN_TGAMMA)
1839 CASE_MATHFN (BUILT_IN_TRUNC)
1840 CASE_MATHFN (BUILT_IN_Y0)
1841 CASE_MATHFN (BUILT_IN_Y1)
1842 CASE_MATHFN (BUILT_IN_YN)
/* Pick the decl whose precision matches TYPE's main variant.  */
1848 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1849 return fn_arr[fcode];
1850 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1851 return fn_arr[fcodef];
1852 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1853 return fn_arr[fcodel];
1858 /* Like mathfn_built_in_1(), but always use the implicit array. */
1861 mathfn_built_in (tree type, enum built_in_function fn)
/* Thin wrapper: forward with implicit = 1.  */
1863 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1866 /* If errno must be maintained, expand the RTL to check if the result,
1867 TARGET, of a built-in function call, EXP, is NaN, and if so set
1871 expand_errno_check (tree exp, rtx target)
1873 rtx lab = gen_label_rtx ();
1875 /* Test the result; if it is NaN, set errno=EDOM because
1876 the argument was not in the domain. */
/* Self-comparison: TARGET == TARGET succeeds except for NaN, so the
   jump to LAB skips the errno handling for ordinary results.  */
1877 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1878 NULL_RTX, NULL_RTX, lab);
1881 /* If this built-in doesn't throw an exception, set errno directly. */
1882 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
/* Targets may supply an rtx for errno; otherwise reference the "errno"
   symbol directly.  */
1884 #ifdef GEN_ERRNO_RTX
1885 rtx errno_rtx = GEN_ERRNO_RTX;
1888 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1890 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1896 /* Make sure the library call isn't expanded as a tail call. */
1897 CALL_EXPR_TAILCALL (exp) = 0;
1899 /* We can't set errno=EDOM directly; let the library call do it.
1900 Pop the arguments right away in case the call gets deleted. */
1902 expand_call (exp, target, 0);
1907 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1908 Return NULL_RTX if a normal call should be emitted rather than expanding
1909 the function in-line. EXP is the expression that is a call to the builtin
1910 function; if convenient, the result should be placed in TARGET.
1911 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* NOTE(review): elided excerpt -- braces, some declarations and the
   fallback paths between the visible lines are missing.  */
1914 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1916 optab builtin_optab;
1918 tree fndecl = get_callee_fndecl (exp);
1919 enum machine_mode mode;
1920 bool errno_set = false;
1923 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1926 arg = CALL_EXPR_ARG (exp, 0);
/* Select the optab for this builtin and note whether the C library
   function can set errno (so a runtime check may be required).  */
1928 switch (DECL_FUNCTION_CODE (fndecl))
1930 CASE_FLT_FN (BUILT_IN_SQRT):
/* sqrt only sets errno for negative arguments; skip the check when the
   argument is provably non-negative.  */
1931 errno_set = ! tree_expr_nonnegative_p (arg);
1932 builtin_optab = sqrt_optab;
1934 CASE_FLT_FN (BUILT_IN_EXP):
1935 errno_set = true; builtin_optab = exp_optab; break;
1936 CASE_FLT_FN (BUILT_IN_EXP10):
1937 CASE_FLT_FN (BUILT_IN_POW10):
1938 errno_set = true; builtin_optab = exp10_optab; break;
1939 CASE_FLT_FN (BUILT_IN_EXP2):
1940 errno_set = true; builtin_optab = exp2_optab; break;
1941 CASE_FLT_FN (BUILT_IN_EXPM1):
1942 errno_set = true; builtin_optab = expm1_optab; break;
1943 CASE_FLT_FN (BUILT_IN_LOGB):
1944 errno_set = true; builtin_optab = logb_optab; break;
1945 CASE_FLT_FN (BUILT_IN_LOG):
1946 errno_set = true; builtin_optab = log_optab; break;
1947 CASE_FLT_FN (BUILT_IN_LOG10):
1948 errno_set = true; builtin_optab = log10_optab; break;
1949 CASE_FLT_FN (BUILT_IN_LOG2):
1950 errno_set = true; builtin_optab = log2_optab; break;
1951 CASE_FLT_FN (BUILT_IN_LOG1P):
1952 errno_set = true; builtin_optab = log1p_optab; break;
1953 CASE_FLT_FN (BUILT_IN_ASIN):
1954 builtin_optab = asin_optab; break;
1955 CASE_FLT_FN (BUILT_IN_ACOS):
1956 builtin_optab = acos_optab; break;
1957 CASE_FLT_FN (BUILT_IN_TAN):
1958 builtin_optab = tan_optab; break;
1959 CASE_FLT_FN (BUILT_IN_ATAN):
1960 builtin_optab = atan_optab; break;
1961 CASE_FLT_FN (BUILT_IN_FLOOR):
1962 builtin_optab = floor_optab; break;
1963 CASE_FLT_FN (BUILT_IN_CEIL):
1964 builtin_optab = ceil_optab; break;
1965 CASE_FLT_FN (BUILT_IN_TRUNC):
1966 builtin_optab = btrunc_optab; break;
1967 CASE_FLT_FN (BUILT_IN_ROUND):
1968 builtin_optab = round_optab; break;
1969 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1970 builtin_optab = nearbyint_optab;
/* nearbyint must not raise inexact; with trapping math it cannot be
   expanded as rint.  */
1971 if (flag_trapping_math)
1973 /* Else fallthrough and expand as rint. */
1974 CASE_FLT_FN (BUILT_IN_RINT):
1975 builtin_optab = rint_optab; break;
1976 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1977 builtin_optab = significand_optab; break;
1982 /* Make a suitable register to place result in. */
1983 mode = TYPE_MODE (TREE_TYPE (exp));
/* No errno check is needed when errno-math is off or NaNs are not
   honored in this mode.  */
1985 if (! flag_errno_math || ! HONOR_NANS (mode))
1988 /* Before working hard, check whether the instruction is available. */
1989 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1991 target = gen_reg_rtx (mode);
1993 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1994 need to expand the argument again. This way, we will not perform
1995 side-effects more the once. */
1996 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1998 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2002 /* Compute into TARGET.
2003 Set TARGET to wherever the result comes back. */
2004 target = expand_unop (mode, builtin_optab, op0, target, 0);
2009 expand_errno_check (exp, target);
2011 /* Output the entire sequence. */
2012 insns = get_insns ();
2018 /* If we were unable to expand via the builtin, stop the sequence
2019 (without outputting the insns) and call to the library function
2020 with the stabilized argument list. */
2024 return expand_call (exp, target, target == const0_rtx);
2027 /* Expand a call to the builtin binary math functions (pow and atan2).
2028 Return NULL_RTX if a normal call should be emitted rather than expanding the
2029 function in-line. EXP is the expression that is a call to the builtin
2030 function; if convenient, the result should be placed in TARGET.
2031 SUBTARGET may be used as the target for computing one of EXP's
/* Expand a call to a two-argument builtin math function (pow, atan2, fmod,
   remainder/drem, scalb/scalbn/scalbln, ldexp) via its optab when the target
   provides an instruction; otherwise fall back to a library call.
   NOTE(review): this chunk is a line-sampled listing (embedded original line
   numbers, missing braces/returns); code left byte-identical.  */
2035 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2037 optab builtin_optab;
2038 rtx op0, op1, insns;
2039 int op1_type = REAL_TYPE;
2040 tree fndecl = get_callee_fndecl (exp);
2042 enum machine_mode mode;
2043 bool errno_set = true;
/* scalbn/scalbln/ldexp take an integer second argument; all the other
   handled builtins take a real one.  */
2045 switch (DECL_FUNCTION_CODE (fndecl))
2047 CASE_FLT_FN (BUILT_IN_SCALBN):
2048 CASE_FLT_FN (BUILT_IN_SCALBLN):
2049 CASE_FLT_FN (BUILT_IN_LDEXP):
2050 op1_type = INTEGER_TYPE;
2055 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2058 arg0 = CALL_EXPR_ARG (exp, 0);
2059 arg1 = CALL_EXPR_ARG (exp, 1);
/* Map the builtin's function code to the optab that expands it.  The
   scalb family is only expanded when the float format is radix 2, in
   which case scalbn/scalbln are equivalent to ldexp.  */
2061 switch (DECL_FUNCTION_CODE (fndecl))
2063 CASE_FLT_FN (BUILT_IN_POW):
2064 builtin_optab = pow_optab; break;
2065 CASE_FLT_FN (BUILT_IN_ATAN2):
2066 builtin_optab = atan2_optab; break;
2067 CASE_FLT_FN (BUILT_IN_SCALB):
2068 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2070 builtin_optab = scalb_optab; break;
2071 CASE_FLT_FN (BUILT_IN_SCALBN):
2072 CASE_FLT_FN (BUILT_IN_SCALBLN):
2073 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2075 /* Fall through... */
2076 CASE_FLT_FN (BUILT_IN_LDEXP):
2077 builtin_optab = ldexp_optab; break;
2078 CASE_FLT_FN (BUILT_IN_FMOD):
2079 builtin_optab = fmod_optab; break;
2080 CASE_FLT_FN (BUILT_IN_REMAINDER):
2081 CASE_FLT_FN (BUILT_IN_DREM):
2082 builtin_optab = remainder_optab; break;
2087 /* Make a suitable register to place result in. */
2088 mode = TYPE_MODE (TREE_TYPE (exp));
2090 /* Before working hard, check whether the instruction is available. */
2091 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2094 target = gen_reg_rtx (mode);
/* No errno check is needed when errno math is disabled or the mode has
   no NaNs to signal domain errors with.  */
2096 if (! flag_errno_math || ! HONOR_NANS (mode))
2099 /* Always stabilize the argument list. */
2100 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2101 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2103 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2104 op1 = expand_normal (arg1);
2108 /* Compute into TARGET.
2109 Set TARGET to wherever the result comes back. */
2110 target = expand_binop (mode, builtin_optab, op0, op1,
2111 target, 0, OPTAB_DIRECT);
2113 /* If we were unable to expand via the builtin, stop the sequence
2114 (without outputting the insns) and call to the library function
2115 with the stabilized argument list. */
2119 return expand_call (exp, target, target == const0_rtx);
2123 expand_errno_check (exp, target);
2125 /* Output the entire sequence. */
2126 insns = get_insns ();
2133 /* Expand a call to the builtin sin and cos math functions.
2134 Return NULL_RTX if a normal call should be emitted rather than expanding the
2135 function in-line. EXP is the expression that is a call to the builtin
2136 function; if convenient, the result should be placed in TARGET.
2137 SUBTARGET may be used as the target for computing one of EXP's
/* Expand a call to the builtin sin/cos functions.  Prefers the combined
   sincos optab (taking whichever of the two results is wanted), falling
   back to the dedicated sin/cos optabs, then to a library call.
   NOTE(review): line-sampled listing; code left byte-identical.  */
2141 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2143 optab builtin_optab;
2145 tree fndecl = get_callee_fndecl (exp);
2146 enum machine_mode mode;
2149 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2152 arg = CALL_EXPR_ARG (exp, 0);
2154 switch (DECL_FUNCTION_CODE (fndecl))
2156 CASE_FLT_FN (BUILT_IN_SIN):
2157 CASE_FLT_FN (BUILT_IN_COS):
2158 builtin_optab = sincos_optab; break;
2163 /* Make a suitable register to place result in. */
2164 mode = TYPE_MODE (TREE_TYPE (exp));
2166 /* Check if sincos insn is available, otherwise fallback
2167 to sin or cos insn. */
2168 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2169 switch (DECL_FUNCTION_CODE (fndecl))
2171 CASE_FLT_FN (BUILT_IN_SIN):
2172 builtin_optab = sin_optab; break;
2173 CASE_FLT_FN (BUILT_IN_COS):
2174 builtin_optab = cos_optab; break;
2179 /* Before working hard, check whether the instruction is available. */
2180 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2182 target = gen_reg_rtx (mode);
2184 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2185 need to expand the argument again. This way, we will not perform
2186 side-effects more than once. */
2187 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2189 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2193 /* Compute into TARGET.
2194 Set TARGET to wherever the result comes back. */
2195 if (builtin_optab == sincos_optab)
/* For the two-output sincos insn, request only the output we need:
   TARGET is passed as the second output for sin, the first for cos.  */
2199 switch (DECL_FUNCTION_CODE (fndecl))
2201 CASE_FLT_FN (BUILT_IN_SIN):
2202 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2204 CASE_FLT_FN (BUILT_IN_COS):
2205 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2210 gcc_assert (result);
2214 target = expand_unop (mode, builtin_optab, op0, target, 0);
2219 /* Output the entire sequence. */
2220 insns = get_insns ();
2226 /* If we were unable to expand via the builtin, stop the sequence
2227 (without outputting the insns) and call to the library function
2228 with the stabilized argument list. */
2232 target = expand_call (exp, target, target == const0_rtx);
2237 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2238 return an RTL instruction code that implements the functionality.
2239 If that isn't possible or available return CODE_FOR_nothing. */
/* NOTE(review): line-sampled listing; code left byte-identical.  */
2241 static enum insn_code
2242 interclass_mathfn_icode (tree arg, tree fndecl)
2244 bool errno_set = false;
2245 optab builtin_optab = 0;
2246 enum machine_mode mode;
/* Select the optab for the given builtin; only ilogb both has an optab
   and can set errno.  */
2248 switch (DECL_FUNCTION_CODE (fndecl))
2250 CASE_FLT_FN (BUILT_IN_ILOGB):
2251 errno_set = true; builtin_optab = ilogb_optab; break;
2252 CASE_FLT_FN (BUILT_IN_ISINF):
2253 builtin_optab = isinf_optab; break;
2254 case BUILT_IN_ISNORMAL:
2255 case BUILT_IN_ISFINITE:
2256 CASE_FLT_FN (BUILT_IN_FINITE):
2257 case BUILT_IN_FINITED32:
2258 case BUILT_IN_FINITED64:
2259 case BUILT_IN_FINITED128:
2260 case BUILT_IN_ISINFD32:
2261 case BUILT_IN_ISINFD64:
2262 case BUILT_IN_ISINFD128:
2263 /* These builtins have no optabs (yet). */
2269 /* There's no easy way to detect the case we need to set EDOM. */
2270 if (flag_errno_math && errno_set)
2271 return CODE_FOR_nothing;
2273 /* Optab mode depends on the mode of the input argument. */
2274 mode = TYPE_MODE (TREE_TYPE (arg));
2277 return optab_handler (builtin_optab, mode)->insn_code;
2278 return CODE_FOR_nothing;
2281 /* Expand a call to one of the builtin math functions that operate on
2282 floating point argument and output an integer result (ilogb, isinf,
2284 Return 0 if a normal call should be emitted rather than expanding the
2285 function in-line. EXP is the expression that is a call to the builtin
2286 function; if convenient, the result should be placed in TARGET.
2287 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* Expand a float-in/integer-out classification builtin (ilogb, isinf,
   isfinite, ...) via the insn code found by interclass_mathfn_icode.
   NOTE(review): line-sampled listing; code left byte-identical.  */
2290 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2292 enum insn_code icode = CODE_FOR_nothing;
2294 tree fndecl = get_callee_fndecl (exp);
2295 enum machine_mode mode;
2298 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2301 arg = CALL_EXPR_ARG (exp, 0);
2302 icode = interclass_mathfn_icode (arg, fndecl);
2303 mode = TYPE_MODE (TREE_TYPE (arg));
2305 if (icode != CODE_FOR_nothing)
2307 /* Make a suitable register to place result in. */
2309 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2310 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
/* A fresh pseudo of the right mode must satisfy the insn's output
   predicate; anything else is a backend bug.  */
2312 gcc_assert (insn_data[icode].operand[0].predicate
2313 (target, GET_MODE (target)));
2315 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2316 need to expand the argument again. This way, we will not perform
2317 side-effects more than once. */
2318 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2320 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2322 if (mode != GET_MODE (op0))
2323 op0 = convert_to_mode (mode, op0, 0);
2325 /* Compute into TARGET.
2326 Set TARGET to wherever the result comes back. */
2327 emit_unop_insn (icode, target, op0, UNKNOWN);
2334 /* Expand a call to the builtin sincos math function.
2335 Return NULL_RTX if a normal call should be emitted rather than expanding the
2336 function in-line. EXP is the expression that is a call to the builtin
/* Expand sincos(x, *sinp, *cosp): compute both results with the sincos
   optab and store them through the two pointer arguments.
   NOTE(review): line-sampled listing; code left byte-identical.  */
2340 expand_builtin_sincos (tree exp)
2342 rtx op0, op1, op2, target1, target2;
2343 enum machine_mode mode;
2344 tree arg, sinp, cosp;
2346 location_t loc = EXPR_LOCATION (exp);
2348 if (!validate_arglist (exp, REAL_TYPE,
2349 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2352 arg = CALL_EXPR_ARG (exp, 0);
2353 sinp = CALL_EXPR_ARG (exp, 1);
2354 cosp = CALL_EXPR_ARG (exp, 2);
2356 /* Make a suitable register to place result in. */
2357 mode = TYPE_MODE (TREE_TYPE (arg));
2359 /* Check if sincos insn is available, otherwise emit the call. */
2360 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2363 target1 = gen_reg_rtx (mode);
2364 target2 = gen_reg_rtx (mode);
2366 op0 = expand_normal (arg);
/* op1/op2 are the MEMs designated by *sinp and *cosp.  */
2367 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2368 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2370 /* Compute into target1 and target2.
2371 Set TARGET to wherever the result comes back. */
2372 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2373 gcc_assert (result);
2375 /* Move target1 and target2 to the memory locations indicated
2377 emit_move_insn (op1, target1);
2378 emit_move_insn (op2, target2);
2383 /* Expand a call to the internal cexpi builtin to the sincos math function.
2384 EXP is the expression that is a call to the builtin function; if convenient,
2385 the result should be placed in TARGET. SUBTARGET may be used as the target
2386 for computing one of EXP's operands. */
/* Expand __builtin_cexpi (cos(x) + i*sin(x)) in order of preference:
   the sincos optab, a sincos libcall (when the target has one), or a
   cexp libcall with a constructed pure-imaginary argument.
   NOTE(review): line-sampled listing; code left byte-identical.  */
2389 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2391 tree fndecl = get_callee_fndecl (exp);
2393 enum machine_mode mode;
2395 location_t loc = EXPR_LOCATION (exp);
2397 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2400 arg = CALL_EXPR_ARG (exp, 0);
2401 type = TREE_TYPE (arg);
2402 mode = TYPE_MODE (TREE_TYPE (arg));
2404 /* Try expanding via a sincos optab, fall back to emitting a libcall
2405 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2406 is only generated from sincos, cexp or if we have either of them. */
2407 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2409 op1 = gen_reg_rtx (mode);
2410 op2 = gen_reg_rtx (mode);
2412 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2414 /* Compute into op1 and op2. */
2415 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2417 else if (TARGET_HAS_SINCOS)
2419 tree call, fn = NULL_TREE;
/* Pick the sincos variant matching the cexpi precision.  */
2423 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2424 fn = built_in_decls[BUILT_IN_SINCOSF];
2425 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2426 fn = built_in_decls[BUILT_IN_SINCOS];
2427 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2428 fn = built_in_decls[BUILT_IN_SINCOSL];
/* Build two stack temporaries and pass their addresses to sincos.  */
2432 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2433 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2434 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2435 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2436 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2437 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2439 /* Make sure not to fold the sincos call again. */
2440 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2441 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2442 call, 3, arg, top1, top2));
2446 tree call, fn = NULL_TREE, narg;
2447 tree ctype = build_complex_type (type);
2449 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2450 fn = built_in_decls[BUILT_IN_CEXPF];
2451 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2452 fn = built_in_decls[BUILT_IN_CEXP];
2453 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2454 fn = built_in_decls[BUILT_IN_CEXPL];
2458 /* If we don't have a decl for cexp create one. This is the
2459 friendliest fallback if the user calls __builtin_cexpi
2460 without full target C99 function support. */
2461 if (fn == NULL_TREE)
2464 const char *name = NULL;
2466 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2468 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2470 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2473 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2474 fn = build_fn_decl (name, fntype);
/* cexpi(x) == cexp(0 + i*x): build the pure-imaginary argument.  */
2477 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2478 build_real (type, dconst0), arg);
2480 /* Make sure not to fold the cexp call again. */
2481 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2482 return expand_expr (build_call_nary (ctype, call, 1, narg),
2483 target, VOIDmode, EXPAND_NORMAL);
/* Combine the sin (op1) and cos (op2) results: cos is the real part,
   sin the imaginary part.  */
2486 /* Now build the proper return type. */
2487 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2488 make_tree (TREE_TYPE (arg), op2),
2489 make_tree (TREE_TYPE (arg), op1)),
2490 target, VOIDmode, EXPAND_NORMAL);
2493 /* Conveniently construct a function call expression. FNDECL names the
2494 function to be called, N is the number of arguments, and the "..."
2495 parameters are the argument expressions. Unlike build_call_expr
2496 this doesn't fold the call, hence it will always return a CALL_EXPR. */
/* Build an unfolded CALL_EXPR to FNDECL with N trailing arguments and
   source location LOC; always yields a CALL_EXPR, never a folded form.
   NOTE(review): the va_start/va_end lines appear lost in extraction;
   code left byte-identical.  */
2499 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2502 tree fntype = TREE_TYPE (fndecl);
2503 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2506 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2508 SET_EXPR_LOCATION (fn, loc);
/* Convenience wrapper using an unknown source location.  */
2511 #define build_call_nofold(...) \
2512 build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
2514 /* Expand a call to one of the builtin rounding functions gcc defines
2515 as an extension (lfloor and lceil). As these are gcc extensions we
2516 do not need to worry about setting errno to EDOM.
2517 If expanding via optab fails, lower expression to (int)(floor(x)).
2518 EXP is the expression that is a call to the builtin function;
2519 if convenient, the result should be placed in TARGET. */
/* Expand lfloor/llfloor/lceil/llceil.  Try the lfloor/lceil optab first;
   on failure lower to (int) floor(x) / (int) ceil(x), creating a decl for
   floor/ceil by hand on non-C99 targets.
   NOTE(review): line-sampled listing; code left byte-identical.  */
2522 expand_builtin_int_roundingfn (tree exp, rtx target)
2524 convert_optab builtin_optab;
2525 rtx op0, insns, tmp;
2526 tree fndecl = get_callee_fndecl (exp);
2527 enum built_in_function fallback_fn;
2528 tree fallback_fndecl;
2529 enum machine_mode mode;
2532 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2535 arg = CALL_EXPR_ARG (exp, 0);
2537 switch (DECL_FUNCTION_CODE (fndecl))
2539 CASE_FLT_FN (BUILT_IN_LCEIL):
2540 CASE_FLT_FN (BUILT_IN_LLCEIL):
2541 builtin_optab = lceil_optab;
2542 fallback_fn = BUILT_IN_CEIL;
2545 CASE_FLT_FN (BUILT_IN_LFLOOR):
2546 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2547 builtin_optab = lfloor_optab;
2548 fallback_fn = BUILT_IN_FLOOR;
2555 /* Make a suitable register to place result in. */
2556 mode = TYPE_MODE (TREE_TYPE (exp));
2558 target = gen_reg_rtx (mode);
2560 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2561 need to expand the argument again. This way, we will not perform
2562 side-effects more than once. */
2563 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2565 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2569 /* Compute into TARGET. */
2570 if (expand_sfix_optab (target, op0, builtin_optab))
2572 /* Output the entire sequence. */
2573 insns = get_insns ();
2579 /* If we were unable to expand via the builtin, stop the sequence
2580 (without outputting the insns). */
2583 /* Fall back to floating point rounding optab. */
2584 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2586 /* For non-C99 targets we may end up without a fallback fndecl here
2587 if the user called __builtin_lfloor directly. In this case emit
2588 a call to the floor/ceil variants nevertheless. This should result
2589 in the best user experience for not full C99 targets. */
2590 if (fallback_fndecl == NULL_TREE)
2593 const char *name = NULL;
/* Choose the floor/ceil library name matching the builtin's precision.
   NOTE(review): the lines assigning NAME for each group appear lost in
   extraction.  */
2595 switch (DECL_FUNCTION_CODE (fndecl))
2597 case BUILT_IN_LCEIL:
2598 case BUILT_IN_LLCEIL:
2601 case BUILT_IN_LCEILF:
2602 case BUILT_IN_LLCEILF:
2605 case BUILT_IN_LCEILL:
2606 case BUILT_IN_LLCEILL:
2609 case BUILT_IN_LFLOOR:
2610 case BUILT_IN_LLFLOOR:
2613 case BUILT_IN_LFLOORF:
2614 case BUILT_IN_LLFLOORF:
2617 case BUILT_IN_LFLOORL:
2618 case BUILT_IN_LLFLOORL:
2625 fntype = build_function_type_list (TREE_TYPE (arg),
2626 TREE_TYPE (arg), NULL_TREE);
2627 fallback_fndecl = build_fn_decl (name, fntype);
2630 exp = build_call_nofold (fallback_fndecl, 1, arg);
2632 tmp = expand_normal (exp);
2634 /* Truncate the result of floating point optab to integer
2635 via expand_fix (). */
2636 target = gen_reg_rtx (mode);
2637 expand_fix (target, tmp, 0);
2642 /* Expand a call to one of the builtin math functions doing integer
2644 Return 0 if a normal call should be emitted rather than expanding the
2645 function in-line. EXP is the expression that is a call to the builtin
2646 function; if convenient, the result should be placed in TARGET. */
/* Expand lrint/llrint/lround/llround via the lrint/lround optabs;
   bail out entirely when errno math is enabled since EDOM detection
   is not possible here.
   NOTE(review): line-sampled listing; code left byte-identical.  */
2649 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2651 convert_optab builtin_optab;
2653 tree fndecl = get_callee_fndecl (exp);
2655 enum machine_mode mode;
2657 /* There's no easy way to detect the case we need to set EDOM. */
2658 if (flag_errno_math)
2661 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2664 arg = CALL_EXPR_ARG (exp, 0);
2666 switch (DECL_FUNCTION_CODE (fndecl))
2668 CASE_FLT_FN (BUILT_IN_LRINT):
2669 CASE_FLT_FN (BUILT_IN_LLRINT):
2670 builtin_optab = lrint_optab; break;
2671 CASE_FLT_FN (BUILT_IN_LROUND):
2672 CASE_FLT_FN (BUILT_IN_LLROUND):
2673 builtin_optab = lround_optab; break;
2678 /* Make a suitable register to place result in. */
2679 mode = TYPE_MODE (TREE_TYPE (exp));
2681 target = gen_reg_rtx (mode);
2683 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2684 need to expand the argument again. This way, we will not perform
2685 side-effects more than once. */
2686 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2688 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2692 if (expand_sfix_optab (target, op0, builtin_optab))
2694 /* Output the entire sequence. */
2695 insns = get_insns ();
2701 /* If we were unable to expand via the builtin, stop the sequence
2702 (without outputting the insns) and call to the library function
2703 with the stabilized argument list. */
2706 target = expand_call (exp, target, target == const0_rtx);
2711 /* To evaluate powi(x,n), the floating point value x raised to the
2712 constant integer exponent n, we use a hybrid algorithm that
2713 combines the "window method" with look-up tables. For an
2714 introduction to exponentiation algorithms and "addition chains",
2715 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2716 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2717 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2718 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2720 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2721 multiplications to inline before calling the system library's pow
2722 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2723 so this default never requires calling pow, powf or powl. */
#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
#endif

/* The size of the "optimal power tree" lookup table.  All
   exponents less than this value are simply looked up in the
   powi_table below.  This threshold is also used to size the
   cache of pseudo registers that hold intermediate results.  */
#define POWI_TABLE_SIZE 256

/* The size, in bits of the window, used in the "window method"
   exponentiation algorithm.  This is equivalent to a radix of
   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
#define POWI_WINDOW_SIZE 3

/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states that an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2784 /* Return the number of multiplications required to calculate
2785 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2786 subroutine of powi_cost. CACHE is an array indicating
2787 which exponents have already been calculated. */
2790 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2792 /* If we've already calculated this exponent, then this evaluation
2793 doesn't require any additional multiplications. */
2798 return powi_lookup_cost (n - powi_table[n], cache)
2799 + powi_lookup_cost (powi_table[n], cache) + 1;
2802 /* Return the number of multiplications required to calculate
2803 powi(x,n) for an arbitrary x, given the exponent N. This
2804 function needs to be kept in sync with expand_powi below. */
2807 powi_cost (HOST_WIDE_INT n)
2809 bool cache[POWI_TABLE_SIZE];
2810 unsigned HOST_WIDE_INT digit;
2811 unsigned HOST_WIDE_INT val;
2817 /* Ignore the reciprocal when calculating the cost. */
2818 val = (n < 0) ? -n : n;
2820 /* Initialize the exponent cache. */
2821 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2826 while (val >= POWI_TABLE_SIZE)
2830 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2831 result += powi_lookup_cost (digit, cache)
2832 + POWI_WINDOW_SIZE + 1;
2833 val >>= POWI_WINDOW_SIZE;
2842 return result + powi_lookup_cost (val, cache);
2845 /* Recursive subroutine of expand_powi. This function takes the array,
2846 CACHE, of already calculated exponents and an exponent N and returns
2847 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
/* Recursive worker for expand_powi: emit RTL computing CACHE[1]**N in
   mode MODE, memoizing results for exponents below POWI_TABLE_SIZE.
   NOTE(review): line-sampled listing (cache-hit path and branch
   structure lines missing); code left byte-identical.  */
2850 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2852 unsigned HOST_WIDE_INT digit;
/* Small exponents: split per the optimal power tree.  */
2856 if (n < POWI_TABLE_SIZE)
2861 target = gen_reg_rtx (mode);
2864 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2865 op1 = expand_powi_1 (mode, powi_table[n], cache);
/* Odd large exponent: peel off a low window of bits.  */
2869 target = gen_reg_rtx (mode);
2870 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2871 op0 = expand_powi_1 (mode, n - digit, cache);
2872 op1 = expand_powi_1 (mode, digit, cache);
/* Even large exponent: square the half power.  */
2876 target = gen_reg_rtx (mode);
2877 op0 = expand_powi_1 (mode, n >> 1, cache);
2881 result = expand_mult (mode, op0, op1, target, 0);
2882 if (result != target)
2883 emit_move_insn (target, result);
2887 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2888 floating point operand in mode MODE, and N is the exponent. This
2889 function needs to be kept in sync with powi_cost above. */
/* Emit RTL computing powi(X, N) in mode MODE; must stay in sync with
   powi_cost above.  NOTE(review): line-sampled listing; code left
   byte-identical.  */
2892 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2894 rtx cache[POWI_TABLE_SIZE];
/* x**0 is 1 regardless of x.  */
2898 return CONST1_RTX (mode);
2900 memset (cache, 0, sizeof (cache));
/* Compute x**|n|, then reciprocate below for negative exponents.  */
2903 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2905 /* If the original exponent was negative, reciprocate the result. */
2907 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2908 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2913 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2914 a normal call should be emitted rather than expanding the function
2915 in-line. EXP is the expression that is a call to the builtin
2916 function; if convenient, the result should be placed in TARGET. */
/* Expand pow(x, y).  For constant Y: use expand_powi for integer
   exponents, sqrt(x)*x**(n/2) for half-integer exponents, and a cbrt
   decomposition for third-of-integer exponents (under unsafe-math
   constraints); otherwise fall back to the generic binary expander.
   NOTE(review): line-sampled listing; code left byte-identical.  */
2919 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2923 tree type = TREE_TYPE (exp);
2924 REAL_VALUE_TYPE cint, c, c2;
2927 enum machine_mode mode = TYPE_MODE (type);
2929 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2932 arg0 = CALL_EXPR_ARG (exp, 0);
2933 arg1 = CALL_EXPR_ARG (exp, 1);
/* Non-constant exponent: nothing clever to do here.  */
2935 if (TREE_CODE (arg1) != REAL_CST
2936 || TREE_OVERFLOW (arg1))
2937 return expand_builtin_mathfn_2 (exp, target, subtarget);
2939 /* Handle constant exponents. */
2941 /* For integer valued exponents we can expand to an optimal multiplication
2942 sequence using expand_powi. */
2943 c = TREE_REAL_CST (arg1);
2944 n = real_to_integer (&c);
2945 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
/* Exponents in [-1, 2] are always exact; larger ones only under
   unsafe math and when the multiply chain is cheap enough.  */
2946 if (real_identical (&c, &cint)
2947 && ((n >= -1 && n <= 2)
2948 || (flag_unsafe_math_optimizations
2949 && optimize_insn_for_speed_p ()
2950 && powi_cost (n) <= POWI_MAX_MULTS)))
2952 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2955 op = force_reg (mode, op);
2956 op = expand_powi (op, mode, n);
2961 narg0 = builtin_save_expr (arg0);
2963 /* If the exponent is not integer valued, check if it is half of an integer.
2964 In this case we can expand to sqrt (x) * x**(n/2). */
2965 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2966 if (fn != NULL_TREE)
2968 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2969 n = real_to_integer (&c2);
2970 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2971 if (real_identical (&c2, &cint)
2972 && ((flag_unsafe_math_optimizations
2973 && optimize_insn_for_speed_p ()
2974 && powi_cost (n/2) <= POWI_MAX_MULTS)
2977 tree call_expr = build_call_nofold (fn, 1, narg0);
2978 /* Use expand_expr in case the newly built call expression
2979 was folded to a non-call. */
2980 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2983 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2984 op2 = force_reg (mode, op2);
2985 op2 = expand_powi (op2, mode, abs (n / 2));
2986 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2987 0, OPTAB_LIB_WIDEN);
2988 /* If the original exponent was negative, reciprocate the
2991 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2992 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2998 /* Try if the exponent is a third of an integer. In this case
2999 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3000 different from pow (x, 1./3.) due to rounding and behavior
3001 with negative x we need to constrain this transformation to
3002 unsafe math and positive x or finite math. */
3003 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3005 && flag_unsafe_math_optimizations
3006 && (tree_expr_nonnegative_p (arg0)
3007 || !HONOR_NANS (mode)))
3009 REAL_VALUE_TYPE dconst3;
3010 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
/* Check that 3*c rounds to an integer N with N/3 == c exactly.  */
3011 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3012 real_round (&c2, mode, &c2);
3013 n = real_to_integer (&c2);
3014 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3015 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3016 real_convert (&c2, mode, &c2);
3017 if (real_identical (&c2, &c)
3018 && ((optimize_insn_for_speed_p ()
3019 && powi_cost (n/3) <= POWI_MAX_MULTS)
3022 tree call_expr = build_call_nofold (fn, 1,narg0);
3023 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
/* When n mod 3 is 2 we need cbrt(x) squared.  */
3024 if (abs (n) % 3 == 2)
3025 op = expand_simple_binop (mode, MULT, op, op, op,
3026 0, OPTAB_LIB_WIDEN);
3029 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3030 op2 = force_reg (mode, op2);
3031 op2 = expand_powi (op2, mode, abs (n / 3));
3032 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3033 0, OPTAB_LIB_WIDEN);
3034 /* If the original exponent was negative, reciprocate the
3037 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3038 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3044 /* Fall back to optab expansion. */
3045 return expand_builtin_mathfn_2 (exp, target, subtarget);
3048 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3049 a normal call should be emitted rather than expanding the function
3050 in-line. EXP is the expression that is a call to the builtin
3051 function; if convenient, the result should be placed in TARGET. */
/* Expand __builtin_powi.  A small or cheap constant exponent uses the
   inline multiply chain from expand_powi; otherwise emit a libgcc
   __powi* libcall.
   NOTE(review): line-sampled listing; code left byte-identical.  */
3054 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3058 enum machine_mode mode;
3059 enum machine_mode mode2;
3061 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3064 arg0 = CALL_EXPR_ARG (exp, 0);
3065 arg1 = CALL_EXPR_ARG (exp, 1);
3066 mode = TYPE_MODE (TREE_TYPE (exp));
3068 /* Handle constant power. */
3070 if (TREE_CODE (arg1) == INTEGER_CST
3071 && !TREE_OVERFLOW (arg1))
3073 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3075 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3076 Otherwise, check the number of multiplications required. */
3077 if ((TREE_INT_CST_HIGH (arg1) == 0
3078 || TREE_INT_CST_HIGH (arg1) == -1)
3079 && ((n >= -1 && n <= 2)
3080 || (optimize_insn_for_speed_p ()
3081 && powi_cost (n) <= POWI_MAX_MULTS)))
3083 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3084 op0 = force_reg (mode, op0);
3085 return expand_powi (op0, mode, n);
3089 /* Emit a libcall to libgcc. */
3091 /* Mode of the 2nd argument must match that of an int. */
3092 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3094 if (target == NULL_RTX)
3095 target = gen_reg_rtx (mode);
3097 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3098 if (GET_MODE (op0) != mode)
3099 op0 = convert_to_mode (mode, op0, 0);
3100 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3101 if (GET_MODE (op1) != mode2)
3102 op1 = convert_to_mode (mode2, op1, 0);
3104 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3105 target, LCT_CONST, mode, 2,
3106 op0, mode, op1, mode2);
3111 /* Expand expression EXP which is a call to the strlen builtin. Return
3112 NULL_RTX if we failed the caller should emit a normal call, otherwise
3113 try to get the result in TARGET, if convenient. */
/* Expand strlen(src): fold to a constant when c_strlen can compute the
   length, otherwise emit the target's strlen insn if one exists for some
   mode at least as wide as TARGET_MODE.
   NOTE(review): line-sampled listing; code left byte-identical.  */
3116 expand_builtin_strlen (tree exp, rtx target,
3117 enum machine_mode target_mode)
3119 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3125 tree src = CALL_EXPR_ARG (exp, 0);
3126 rtx result, src_reg, char_rtx, before_strlen;
3127 enum machine_mode insn_mode = target_mode, char_mode;
3128 enum insn_code icode = CODE_FOR_nothing;
3131 /* If the length can be computed at compile-time, return it. */
3132 len = c_strlen (src, 0);
3134 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3136 /* If the length can be computed at compile-time and is constant
3137 integer, but there are side-effects in src, evaluate
3138 src for side-effects, then return len.
3139 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3140 can be optimized into: i++; x = 3; */
3141 len = c_strlen (src, 1);
3142 if (len && TREE_CODE (len) == INTEGER_CST)
3144 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3145 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3148 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3150 /* If SRC is not a pointer type, don't do this operation inline. */
3154 /* Bail out if we can't compute strlen in the right mode. */
3155 while (insn_mode != VOIDmode)
3157 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3158 if (icode != CODE_FOR_nothing)
3161 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3163 if (insn_mode == VOIDmode)
3166 /* Make a place to write the result of the instruction. */
3170 && GET_MODE (result) == insn_mode
3171 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3172 result = gen_reg_rtx (insn_mode);
3174 /* Make a place to hold the source address. We will not expand
3175 the actual source until we are sure that the expansion will
3176 not fail -- there are trees that cannot be expanded twice. */
3177 src_reg = gen_reg_rtx (Pmode);
3179 /* Mark the beginning of the strlen sequence so we can emit the
3180 source operand later. */
3181 before_strlen = get_last_insn ();
/* Operand 2 of the strlen pattern is the character to scan for
   (always NUL here); legitimize it against the insn's predicate.  */
3183 char_rtx = const0_rtx;
3184 char_mode = insn_data[(int) icode].operand[2].mode;
3185 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3187 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3189 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3190 char_rtx, GEN_INT (align));
3195 /* Now that we are assured of success, expand the source. */
3197 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3199 emit_move_insn (src_reg, pat);
/* Emit the source-address setup before the strlen pattern that was
   generated above.  */
3204 emit_insn_after (pat, before_strlen);
3206 emit_insn_before (pat, get_insns ());
3208 /* Return the value in the proper mode for this function. */
3209 if (GET_MODE (result) == target_mode)
3211 else if (target != 0)
3212 convert_move (target, result, 0);
3214 target = convert_to_mode (target_mode, result, 0);
3220 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3221 bytes from constant string DATA + OFFSET and return it as target
/* NOTE(review): header comment is cut mid-sentence by the extraction;
   presumably it ends "... as target constant" as in upstream GCC --
   confirm against the full file.  Code kept byte-identical.  */
3225 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3226 enum machine_mode mode)
3228 const char *str = (const char *) data;
/* The assert checks the read stays within the string including its
   NUL terminator.  */
3230 gcc_assert (offset >= 0
3231 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3232 <= strlen (str) + 1));
3234 return c_readstr (str + offset, mode);
3237 /* Expand a call EXP to the memcpy builtin.
3238 Return NULL_RTX if we failed, the caller should emit a normal call,
3239 otherwise try to get the result in TARGET, if convenient (and in
3240 mode MODE if that's convenient). */
/* NOTE(review): partial extraction -- embedded line numbers and missing
   intermediate lines; code kept byte-identical.  */
3243 expand_builtin_memcpy (tree exp, rtx target)
3245 if (!validate_arglist (exp,
3246 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3250 tree dest = CALL_EXPR_ARG (exp, 0);
3251 tree src = CALL_EXPR_ARG (exp, 1);
3252 tree len = CALL_EXPR_ARG (exp, 2);
3253 const char *src_str;
3254 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3255 unsigned int dest_align
3256 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3257 rtx dest_mem, src_mem, dest_addr, len_rtx;
3258 HOST_WIDE_INT expected_size = -1;
3259 unsigned int expected_align = 0;
3261 /* If DEST is not a pointer type, call the normal function. */
3262 if (dest_align == 0)
3265 /* If either SRC is not a pointer type, don't do this
3266 operation in-line. */
/* Value profiling may supply a better expected alignment/size for the
   block operation.  */
3270 if (currently_expanding_gimple_stmt)
3271 stringop_block_profile (currently_expanding_gimple_stmt,
3272 &expected_align, &expected_size);
3274 if (expected_align < dest_align)
3275 expected_align = dest_align;
3276 dest_mem = get_memory_rtx (dest, len);
3277 set_mem_align (dest_mem, dest_align);
3278 len_rtx = expand_normal (len);
3279 src_str = c_getstr (src);
3281 /* If SRC is a string constant and block move would be done
3282 by pieces, we can avoid loading the string from memory
3283 and only store the computed constants. */
3285 && CONST_INT_P (len_rtx)
3286 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3287 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3288 CONST_CAST (char *, src_str),
3291 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3292 builtin_memcpy_read_str,
3293 CONST_CAST (char *, src_str),
3294 dest_align, false, 0);
3295 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3296 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3300 src_mem = get_memory_rtx (src, len);
3301 set_mem_align (src_mem, src_align);
3303 /* Copy word part most expediently. */
3304 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3305 CALL_EXPR_TAILCALL (exp)
3306 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3307 expected_align, expected_size);
3311 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3312 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3318 /* Expand a call EXP to the mempcpy builtin.
3319 Return NULL_RTX if we failed; the caller should emit a normal call,
3320 otherwise try to get the result in TARGET, if convenient (and in
3321 mode MODE if that's convenient). If ENDP is 0 return the
3322 destination pointer, if ENDP is 1 return the end pointer ala
3323 mempcpy, and if ENDP is 2 return the end pointer minus one ala
/* NOTE(review): comment cut mid-sentence by extraction (presumably
   "... minus one ala stpcpy" -- confirm).  Code kept byte-identical.  */
3327 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3329 if (!validate_arglist (exp,
3330 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3334 tree dest = CALL_EXPR_ARG (exp, 0);
3335 tree src = CALL_EXPR_ARG (exp, 1);
3336 tree len = CALL_EXPR_ARG (exp, 2);
/* Delegate to the args-based helper; mempcpy always wants the end
   pointer, hence endp == 1.  */
3337 return expand_builtin_mempcpy_args (dest, src, len,
3338 target, mode, /*endp=*/ 1);
3342 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3343 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3344 so that this can also be called without constructing an actual CALL_EXPR.
3345 The other arguments and return value are the same as for
3346 expand_builtin_mempcpy. */
/* NOTE(review): partial extraction -- embedded line numbers and missing
   intermediate lines; code kept byte-identical.  */
3349 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3350 rtx target, enum machine_mode mode, int endp)
3352 /* If return value is ignored, transform mempcpy into memcpy. */
3353 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3355 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3356 tree result = build_call_nofold (fn, 3, dest, src, len);
3357 return expand_expr (result, target, mode, EXPAND_NORMAL);
3361 const char *src_str;
3362 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3363 unsigned int dest_align
3364 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3365 rtx dest_mem, src_mem, len_rtx;
3367 /* If either SRC or DEST is not a pointer type, don't do this
3368 operation in-line. */
3369 if (dest_align == 0 || src_align == 0)
3372 /* If LEN is not constant, call the normal function. */
3373 if (! host_integerp (len, 1))
3376 len_rtx = expand_normal (len);
3377 src_str = c_getstr (src);
3379 /* If SRC is a string constant and block move would be done
3380 by pieces, we can avoid loading the string from memory
3381 and only store the computed constants. */
3383 && CONST_INT_P (len_rtx)
3384 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3385 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3386 CONST_CAST (char *, src_str),
3389 dest_mem = get_memory_rtx (dest, len);
3390 set_mem_align (dest_mem, dest_align);
3391 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3392 builtin_memcpy_read_str,
3393 CONST_CAST (char *, src_str),
3394 dest_align, false, endp);
3395 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3396 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Otherwise, fall back to a piecewise move when the (constant) length
   and alignment allow it.  */
3400 if (CONST_INT_P (len_rtx)
3401 && can_move_by_pieces (INTVAL (len_rtx),
3402 MIN (dest_align, src_align)))
3404 dest_mem = get_memory_rtx (dest, len);
3405 set_mem_align (dest_mem, dest_align);
3406 src_mem = get_memory_rtx (src, len);
3407 set_mem_align (src_mem, src_align);
3408 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3409 MIN (dest_align, src_align), endp);
3410 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3411 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Fallback definitions for targets without a movstr insn pattern.  */
3420 # define HAVE_movstr 0
3421 # define CODE_FOR_movstr CODE_FOR_nothing
3424 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3425 we failed, the caller should emit a normal call, otherwise try to
3426 get the result in TARGET, if convenient. If ENDP is 0 return the
3427 destination pointer, if ENDP is 1 return the end pointer ala
3428 mempcpy, and if ENDP is 2 return the end pointer minus one ala
/* NOTE(review): comment and body cut by extraction (missing lines);
   code kept byte-identical.  */
3432 expand_movstr (tree dest, tree src, rtx target, int endp)
3438 const struct insn_data * data;
3443 dest_mem = get_memory_rtx (dest, NULL);
3444 src_mem = get_memory_rtx (src, NULL);
3447 target = force_reg (Pmode, XEXP (dest_mem, 0));
3448 dest_mem = replace_equiv_address (dest_mem, target);
3449 end = gen_reg_rtx (Pmode);
3453 if (target == 0 || target == const0_rtx)
3455 end = gen_reg_rtx (Pmode);
3463 data = insn_data + CODE_FOR_movstr;
3465 if (data->operand[0].mode != VOIDmode)
3466 end = gen_lowpart (data->operand[0].mode, end);
3468 insn = data->genfun (end, dest_mem, src_mem);
3474 /* movstr is supposed to set end to the address of the NUL
3475 terminator. If the caller requested a mempcpy-like return value,
/* (rest of comment lost to extraction; presumably "adjust it" --
   confirm against the full file)  */
3477 if (endp == 1 && target != const0_rtx)
3479 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3480 emit_move_insn (target, force_operand (tem, NULL_RTX));
3486 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3487 NULL_RTX if we failed the caller should emit a normal call, otherwise
3488 try to get the result in TARGET, if convenient (and in mode MODE if that's
/* NOTE(review): partial extraction; code kept byte-identical.  */
3492 expand_builtin_strcpy (tree exp, rtx target)
3494 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3496 tree dest = CALL_EXPR_ARG (exp, 0);
3497 tree src = CALL_EXPR_ARG (exp, 1);
3498 return expand_builtin_strcpy_args (dest, src, target);
3503 /* Helper function to do the actual work for expand_builtin_strcpy. The
3504 arguments to the builtin_strcpy call DEST and SRC are broken out
3505 so that this can also be called without constructing an actual CALL_EXPR.
3506 The other arguments and return value are the same as for
3507 expand_builtin_strcpy. */
3510 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
/* strcpy returns the destination pointer, hence endp == 0.  */
3512 return expand_movstr (dest, src, target, /*endp=*/0);
3515 /* Expand a call EXP to the stpcpy builtin.
3516 Return NULL_RTX if we failed the caller should emit a normal call,
3517 otherwise try to get the result in TARGET, if convenient (and in
3518 mode MODE if that's convenient). */
/* NOTE(review): partial extraction -- embedded line numbers and missing
   intermediate lines; code kept byte-identical.  */
3521 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3524 location_t loc = EXPR_LOCATION (exp);
3526 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3529 dst = CALL_EXPR_ARG (exp, 0);
3530 src = CALL_EXPR_ARG (exp, 1);
3532 /* If return value is ignored, transform stpcpy into strcpy. */
3533 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3535 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3536 tree result = build_call_nofold (fn, 2, dst, src);
3537 return expand_expr (result, target, mode, EXPAND_NORMAL);
3544 /* Ensure we get an actual string whose length can be evaluated at
3545 compile-time, not an expression containing a string. This is
3546 because the latter will potentially produce pessimized code
3547 when used to produce the return value. */
3548 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3549 return expand_movstr (dst, src, target, /*endp=*/2);
/* Try mempcpy of strlen+1 bytes; endp == 2 yields the stpcpy-style
   end-minus-one return value.  */
3551 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3552 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3553 target, mode, /*endp=*/2);
3558 if (TREE_CODE (len) == INTEGER_CST)
3560 rtx len_rtx = expand_normal (len);
3562 if (CONST_INT_P (len_rtx))
3564 ret = expand_builtin_strcpy_args (dst, src, target);
3570 if (mode != VOIDmode)
3571 target = gen_reg_rtx (mode);
3573 target = gen_reg_rtx (GET_MODE (ret));
3575 if (GET_MODE (target) != GET_MODE (ret))
3576 ret = gen_lowpart (GET_MODE (target), ret);
3578 ret = plus_constant (ret, INTVAL (len_rtx));
3579 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3587 return expand_movstr (dst, src, target, /*endp=*/2);
3591 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3592 bytes from constant string DATA + OFFSET and return it as target
/* NOTE(review): header cut mid-sentence by extraction (presumably
   "... as target constant" -- confirm).  Code kept byte-identical.  */
3596 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3597 enum machine_mode mode)
3599 const char *str = (const char *) data;
/* Past the end of the source string strncpy pads with zeros; the
   zero-return branch for that case is among the lines lost to
   extraction.  */
3601 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3604 return c_readstr (str + offset, mode);
3607 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3608 NULL_RTX if we failed the caller should emit a normal call. */
/* NOTE(review): partial extraction; code kept byte-identical.  */
3611 expand_builtin_strncpy (tree exp, rtx target)
3613 location_t loc = EXPR_LOCATION (exp);
3615 if (validate_arglist (exp,
3616 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3618 tree dest = CALL_EXPR_ARG (exp, 0);
3619 tree src = CALL_EXPR_ARG (exp, 1);
3620 tree len = CALL_EXPR_ARG (exp, 2);
3621 tree slen = c_strlen (src, 1);
3623 /* We must be passed a constant len and src parameter. */
3624 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3627 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3629 /* We're required to pad with trailing zeros if the requested
3630 len is greater than strlen(s2)+1. In that case try to
3631 use store_by_pieces, if it fails, punt. */
3632 if (tree_int_cst_lt (slen, len))
3634 unsigned int dest_align
3635 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3636 const char *p = c_getstr (src);
3639 if (!p || dest_align == 0 || !host_integerp (len, 1)
3640 || !can_store_by_pieces (tree_low_cst (len, 1),
3641 builtin_strncpy_read_str,
3642 CONST_CAST (char *, p),
3646 dest_mem = get_memory_rtx (dest, len);
3647 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3648 builtin_strncpy_read_str,
3649 CONST_CAST (char *, p), dest_align, false, 0);
3650 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3651 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3658 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3659 bytes from constant string DATA + OFFSET and return it as target
/* NOTE(review): header cut mid-sentence by extraction.  DATA here is a
   pointer to a single fill byte, replicated across the mode.  */
3663 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3664 enum machine_mode mode)
3666 const char *c = (const char *) data;
3667 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3669 memset (p, *c, GET_MODE_SIZE (mode));
3671 return c_readstr (p, mode);
3674 /* Callback routine for store_by_pieces. Return the RTL of a register
3675 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3676 char value given in the RTL register data. For example, if mode is
3677 4 bytes wide, return the RTL for 0x01010101*data. */
/* NOTE(review): partial extraction; code kept byte-identical.  */
3680 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3681 enum machine_mode mode)
3687 size = GET_MODE_SIZE (mode);
/* Build the 0x0101...01 coefficient by reading a buffer of 1-bytes,
   then multiply by the runtime byte value.  */
3691 p = XALLOCAVEC (char, size);
3692 memset (p, 1, size);
3693 coeff = c_readstr (p, mode);
3695 target = convert_to_mode (mode, (rtx) data, 1);
3696 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3697 return force_reg (mode, target);
3700 /* Expand expression EXP, which is a call to the memset builtin. Return
3701 NULL_RTX if we failed the caller should emit a normal call, otherwise
3702 try to get the result in TARGET, if convenient (and in mode MODE if that's
/* NOTE(review): comment cut mid-sentence by extraction.  */
3706 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3708 if (!validate_arglist (exp,
3709 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3713 tree dest = CALL_EXPR_ARG (exp, 0);
3714 tree val = CALL_EXPR_ARG (exp, 1);
3715 tree len = CALL_EXPR_ARG (exp, 2);
3716 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3720 /* Helper function to do the actual work for expand_builtin_memset. The
3721 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3722 so that this can also be called without constructing an actual CALL_EXPR.
3723 The other arguments and return value are the same as for
3724 expand_builtin_memset. */
/* NOTE(review): partial extraction -- embedded line numbers and missing
   intermediate lines; code kept byte-identical.  */
3727 expand_builtin_memset_args (tree dest, tree val, tree len,
3728 rtx target, enum machine_mode mode, tree orig_exp)
3731 enum built_in_function fcode;
3733 unsigned int dest_align;
3734 rtx dest_mem, dest_addr, len_rtx;
3735 HOST_WIDE_INT expected_size = -1;
3736 unsigned int expected_align = 0;
3738 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3740 /* If DEST is not a pointer type, don't do this operation in-line. */
3741 if (dest_align == 0)
3744 if (currently_expanding_gimple_stmt)
3745 stringop_block_profile (currently_expanding_gimple_stmt,
3746 &expected_align, &expected_size);
3748 if (expected_align < dest_align)
3749 expected_align = dest_align;
3751 /* If the LEN parameter is zero, return DEST. */
3752 if (integer_zerop (len))
3754 /* Evaluate and ignore VAL in case it has side-effects. */
3755 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3756 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3759 /* Stabilize the arguments in case we fail. */
3760 dest = builtin_save_expr (dest);
3761 val = builtin_save_expr (val);
3762 len = builtin_save_expr (len);
3764 len_rtx = expand_normal (len);
3765 dest_mem = get_memory_rtx (dest, len);
/* Non-constant fill value: replicate it at runtime.  */
3767 if (TREE_CODE (val) != INTEGER_CST)
3771 val_rtx = expand_normal (val);
3772 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3775 /* Assume that we can memset by pieces if we can store
3776 * the coefficients by pieces (in the required modes).
3777 * We can't pass builtin_memset_gen_str as that emits RTL. */
3779 if (host_integerp (len, 1)
3780 && can_store_by_pieces (tree_low_cst (len, 1),
3781 builtin_memset_read_str, &c, dest_align,
3784 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3786 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3787 builtin_memset_gen_str, val_rtx, dest_align,
3790 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3791 dest_align, expected_align,
3795 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3796 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Constant fill value path: reduce VAL to a host char C.  */
3800 if (target_char_cast (val, &c))
3805 if (host_integerp (len, 1)
3806 && can_store_by_pieces (tree_low_cst (len, 1),
3807 builtin_memset_read_str, &c, dest_align,
3809 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3810 builtin_memset_read_str, &c, dest_align, true, 0);
3811 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3812 dest_align, expected_align,
3816 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3817 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Zero fill: use clear_storage_hints.  */
3821 set_mem_align (dest_mem, dest_align);
3822 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3823 CALL_EXPR_TAILCALL (orig_exp)
3824 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3825 expected_align, expected_size);
3829 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3830 dest_addr = convert_memory_address (ptr_mode, dest_addr);
/* Fallback: re-emit the original memset/bzero call.  */
3836 fndecl = get_callee_fndecl (orig_exp);
3837 fcode = DECL_FUNCTION_CODE (fndecl);
3838 if (fcode == BUILT_IN_MEMSET)
3839 fn = build_call_nofold (fndecl, 3, dest, val, len);
3840 else if (fcode == BUILT_IN_BZERO)
3841 fn = build_call_nofold (fndecl, 2, dest, len);
3844 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3845 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3846 return expand_call (fn, target, target == const0_rtx);
3849 /* Expand expression EXP, which is a call to the bzero builtin. Return
3850 NULL_RTX if we failed the caller should emit a normal call. */
3853 expand_builtin_bzero (tree exp)
3856 location_t loc = EXPR_LOCATION (exp);
3858 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3861 dest = CALL_EXPR_ARG (exp, 0);
3862 size = CALL_EXPR_ARG (exp, 1);
3864 /* New argument list transforming bzero(ptr x, int y) to
3865 memset(ptr x, int 0, size_t y). This is done this way
3866 so that if it isn't expanded inline, we fallback to
3867 calling bzero instead of memset. */
/* const0_rtx as TARGET means the return value is ignored, which lets
   the memset helper choose the bzero fallback path.  */
3869 return expand_builtin_memset_args (dest, integer_zero_node,
3870 fold_convert_loc (loc, sizetype, size),
3871 const0_rtx, VOIDmode, exp);
3874 /* Expand expression EXP, which is a call to the memcmp built-in function.
3875 Return NULL_RTX if we failed and the
3876 caller should emit a normal call, otherwise try to get the result in
3877 TARGET, if convenient (and in mode MODE, if that's convenient). */
/* NOTE(review): partial extraction -- embedded line numbers and missing
   intermediate lines; code kept byte-identical.  */
3880 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3881 ATTRIBUTE_UNUSED enum machine_mode mode)
3883 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3885 if (!validate_arglist (exp,
3886 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* Only attempt inline expansion when the target has a block- or
   string-compare insn pattern.  */
3889 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3891 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3894 tree arg1 = CALL_EXPR_ARG (exp, 0);
3895 tree arg2 = CALL_EXPR_ARG (exp, 1);
3896 tree len = CALL_EXPR_ARG (exp, 2);
3899 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3901 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3902 enum machine_mode insn_mode;
3904 #ifdef HAVE_cmpmemsi
3906 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3909 #ifdef HAVE_cmpstrnsi
3911 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3916 /* If we don't have POINTER_TYPE, call the function. */
3917 if (arg1_align == 0 || arg2_align == 0)
3920 /* Make a place to write the result of the instruction. */
3923 && REG_P (result) && GET_MODE (result) == insn_mode
3924 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3925 result = gen_reg_rtx (insn_mode);
3927 arg1_rtx = get_memory_rtx (arg1, len);
3928 arg2_rtx = get_memory_rtx (arg2, len);
3929 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3931 /* Set MEM_SIZE as appropriate. */
3932 if (CONST_INT_P (arg3_rtx))
3934 set_mem_size (arg1_rtx, arg3_rtx);
3935 set_mem_size (arg2_rtx, arg3_rtx);
3938 #ifdef HAVE_cmpmemsi
3940 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3941 GEN_INT (MIN (arg1_align, arg2_align)));
3944 #ifdef HAVE_cmpstrnsi
3946 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3947 GEN_INT (MIN (arg1_align, arg2_align)));
/* No usable insn: fall back to a library call to memcmp.  */
3955 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3956 TYPE_MODE (integer_type_node), 3,
3957 XEXP (arg1_rtx, 0), Pmode,
3958 XEXP (arg2_rtx, 0), Pmode,
3959 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3960 TYPE_UNSIGNED (sizetype)),
3961 TYPE_MODE (sizetype));
3963 /* Return the value in the proper mode for this function. */
3964 mode = TYPE_MODE (TREE_TYPE (exp));
3965 if (GET_MODE (result) == mode)
3967 else if (target != 0)
3969 convert_move (target, result, 0);
3973 return convert_to_mode (mode, result, 0);
3980 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3981 if we failed the caller should emit a normal call, otherwise try to get
3982 the result in TARGET, if convenient. */
/* NOTE(review): partial extraction -- embedded line numbers and missing
   intermediate lines; code kept byte-identical.  */
3985 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3987 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3990 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3991 if (cmpstr_optab[SImode] != CODE_FOR_nothing
3992 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
3994 rtx arg1_rtx, arg2_rtx;
3995 rtx result, insn = NULL_RTX;
3997 tree arg1 = CALL_EXPR_ARG (exp, 0);
3998 tree arg2 = CALL_EXPR_ARG (exp, 1);
4001 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4003 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4005 /* If we don't have POINTER_TYPE, call the function. */
4006 if (arg1_align == 0 || arg2_align == 0)
4009 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4010 arg1 = builtin_save_expr (arg1);
4011 arg2 = builtin_save_expr (arg2);
4013 arg1_rtx = get_memory_rtx (arg1, NULL);
4014 arg2_rtx = get_memory_rtx (arg2, NULL);
4016 #ifdef HAVE_cmpstrsi
4017 /* Try to call cmpstrsi. */
4020 enum machine_mode insn_mode
4021 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4023 /* Make a place to write the result of the instruction. */
4026 && REG_P (result) && GET_MODE (result) == insn_mode
4027 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4028 result = gen_reg_rtx (insn_mode);
4030 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4031 GEN_INT (MIN (arg1_align, arg2_align)));
4034 #ifdef HAVE_cmpstrnsi
4035 /* Try to determine at least one length and call cmpstrnsi. */
4036 if (!insn && HAVE_cmpstrnsi)
4041 enum machine_mode insn_mode
4042 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4043 tree len1 = c_strlen (arg1, 1);
4044 tree len2 = c_strlen (arg2, 1);
4047 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4049 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4051 /* If we don't have a constant length for the first, use the length
4052 of the second, if we know it. We don't require a constant for
4053 this case; some cost analysis could be done if both are available
4054 but neither is constant. For now, assume they're equally cheap,
4055 unless one has side effects. If both strings have constant lengths,
/* (comment truncated by extraction; presumably "use the smaller" --
   confirm against the full file)  */
4062 else if (TREE_SIDE_EFFECTS (len1))
4064 else if (TREE_SIDE_EFFECTS (len2))
4066 else if (TREE_CODE (len1) != INTEGER_CST)
4068 else if (TREE_CODE (len2) != INTEGER_CST)
4070 else if (tree_int_cst_lt (len1, len2))
4075 /* If both arguments have side effects, we cannot optimize. */
4076 if (!len || TREE_SIDE_EFFECTS (len))
4079 arg3_rtx = expand_normal (len);
4081 /* Make a place to write the result of the instruction. */
4084 && REG_P (result) && GET_MODE (result) == insn_mode
4085 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4086 result = gen_reg_rtx (insn_mode);
4088 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4089 GEN_INT (MIN (arg1_align, arg2_align)));
4095 enum machine_mode mode;
4098 /* Return the value in the proper mode for this function. */
4099 mode = TYPE_MODE (TREE_TYPE (exp));
4100 if (GET_MODE (result) == mode)
4103 return convert_to_mode (mode, result, 0);
4104 convert_move (target, result, 0);
4108 /* Expand the library call ourselves using a stabilized argument
4109 list to avoid re-evaluating the function's arguments twice. */
4110 #ifdef HAVE_cmpstrnsi
4113 fndecl = get_callee_fndecl (exp);
4114 fn = build_call_nofold (fndecl, 2, arg1, arg2);
4115 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4116 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4117 return expand_call (fn, target, target == const0_rtx);
4123 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4124 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4125 the result in TARGET, if convenient. */
/* NOTE(review): partial extraction -- embedded line numbers and missing
   intermediate lines; code kept byte-identical.  */
4128 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4129 ATTRIBUTE_UNUSED enum machine_mode mode)
4131 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4133 if (!validate_arglist (exp,
4134 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4137 /* If c_strlen can determine an expression for one of the string
4138 lengths, and it doesn't have side effects, then emit cmpstrnsi
4139 using length MIN(strlen(string)+1, arg3). */
4140 #ifdef HAVE_cmpstrnsi
4143 tree len, len1, len2;
4144 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4147 tree arg1 = CALL_EXPR_ARG (exp, 0);
4148 tree arg2 = CALL_EXPR_ARG (exp, 1);
4149 tree arg3 = CALL_EXPR_ARG (exp, 2);
4152 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4154 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4155 enum machine_mode insn_mode
4156 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4158 len1 = c_strlen (arg1, 1);
4159 len2 = c_strlen (arg2, 1);
4162 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4164 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4166 /* If we don't have a constant length for the first, use the length
4167 of the second, if we know it. We don't require a constant for
4168 this case; some cost analysis could be done if both are available
4169 but neither is constant. For now, assume they're equally cheap,
4170 unless one has side effects. If both strings have constant lengths,
/* (comment truncated by extraction; presumably "use the smaller" --
   confirm against the full file)  */
4177 else if (TREE_SIDE_EFFECTS (len1))
4179 else if (TREE_SIDE_EFFECTS (len2))
4181 else if (TREE_CODE (len1) != INTEGER_CST)
4183 else if (TREE_CODE (len2) != INTEGER_CST)
4185 else if (tree_int_cst_lt (len1, len2))
4190 /* If both arguments have side effects, we cannot optimize. */
4191 if (!len || TREE_SIDE_EFFECTS (len))
4194 /* The actual new length parameter is MIN(len,arg3). */
4195 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4196 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4198 /* If we don't have POINTER_TYPE, call the function. */
4199 if (arg1_align == 0 || arg2_align == 0)
4202 /* Make a place to write the result of the instruction. */
4205 && REG_P (result) && GET_MODE (result) == insn_mode
4206 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4207 result = gen_reg_rtx (insn_mode);
4209 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4210 arg1 = builtin_save_expr (arg1);
4211 arg2 = builtin_save_expr (arg2);
4212 len = builtin_save_expr (len);
4214 arg1_rtx = get_memory_rtx (arg1, len);
4215 arg2_rtx = get_memory_rtx (arg2, len);
4216 arg3_rtx = expand_normal (len);
4217 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4218 GEN_INT (MIN (arg1_align, arg2_align)));
4223 /* Return the value in the proper mode for this function. */
4224 mode = TYPE_MODE (TREE_TYPE (exp));
4225 if (GET_MODE (result) == mode)
4228 return convert_to_mode (mode, result, 0);
4229 convert_move (target, result, 0);
4233 /* Expand the library call ourselves using a stabilized argument
4234 list to avoid re-evaluating the function's arguments twice. */
4235 fndecl = get_callee_fndecl (exp);
4236 fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
4237 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4238 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4239 return expand_call (fn, target, target == const0_rtx);
4245 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4246 if that's convenient. */
/* NOTE(review): partial extraction; code kept byte-identical.  */
4249 expand_builtin_saveregs (void)
4253 /* Don't do __builtin_saveregs more than once in a function.
4254 Save the result of the first call and reuse it. */
4255 if (saveregs_value != 0)
4256 return saveregs_value;
4258 /* When this function is called, it means that registers must be
4259 saved on entry to this function. So we migrate the call to the
4260 first insn of this function. */
4264 /* Do whatever the machine needs done in this case. */
4265 val = targetm.calls.expand_builtin_saveregs ();
4270 saveregs_value = val;
4272 /* Put the insns after the NOTE that starts the function. If this
4273 is inside a start_sequence, make the outer-level insn chain current, so
4274 the code is placed at the start of the function. */
4275 push_topmost_sequence ();
4276 emit_insn_after (seq, entry_of_function ());
4277 pop_topmost_sequence ();
4282 /* __builtin_args_info (N) returns word N of the arg space info
4283 for the current function. The number and meanings of words
4284 is controlled by the definition of CUMULATIVE_ARGS. */
4287 expand_builtin_args_info (tree exp)
4289 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
/* Reinterpret the CUMULATIVE_ARGS record as an array of ints; the
   assert below guarantees its size is a whole number of ints.  */
4290 int *word_ptr = (int *) &crtl->args.info;
4292 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4294 if (call_expr_nargs (exp) != 0)
4296 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4297 error ("argument of %<__builtin_args_info%> must be constant")
4300 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4302 if (wordnum < 0 || wordnum >= nwords)
4303 error ("argument of %<__builtin_args_info%> out of range");
4305 return GEN_INT (word_ptr[wordnum]);
4309 error ("missing argument in %<__builtin_args_info%>");
4314 /* Expand a call to __builtin_next_arg. */
4317 expand_builtin_next_arg (void)
4319 /* Checking arguments is already done in fold_builtin_next_arg
4320 that must be called before this function. */
/* The address of the first anonymous argument is the internal arg
   pointer plus the named-argument offset.  */
4321 return expand_binop (ptr_mode, add_optab,
4322 crtl->args.internal_arg_pointer,
4323 crtl->args.arg_offset_rtx,
4324 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4327 /* Make it easier for the backends by protecting the valist argument
4328 from multiple evaluations. */
/* NOTE(review): partial extraction; code kept byte-identical.  */
4331 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4333 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4335 gcc_assert (vatype != NULL_TREE);
4337 if (TREE_CODE (vatype) == ARRAY_TYPE)
4339 if (TREE_SIDE_EFFECTS (valist))
4340 valist = save_expr (valist);
4342 /* For this case, the backends will be expecting a pointer to
4343 vatype, but it's possible we've actually been given an array
4344 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4346 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4348 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4349 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
/* Non-array va_list: take its address when an lvalue is needed,
   otherwise dereference the saved pointer.  */
4358 if (! TREE_SIDE_EFFECTS (valist))
4361 pt = build_pointer_type (vatype);
4362 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4363 TREE_SIDE_EFFECTS (valist) = 1;
4366 if (TREE_SIDE_EFFECTS (valist))
4367 valist = save_expr (valist);
4368 valist = build_fold_indirect_ref_loc (loc, valist);
4374 /* The "standard" definition of va_list is void*. */
/* Default TARGET_BUILD_BUILTIN_VA_LIST hook: plain pointer type.  */
4377 std_build_builtin_va_list (void)
4379 return ptr_type_node;
4382 /* The "standard" abi va_list is va_list_type_node. */
/* Default TARGET_FN_ABI_VA_LIST hook; FNDECL is ignored.  */
4385 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4387 return va_list_type_node;
4390 /* The "standard" type of va_list is va_list_type_node. */
4393 std_canonical_va_list_type (tree type)
4397 if (INDIRECT_REF_P (type))
4398 type = TREE_TYPE (type);
4399 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4400 type = TREE_TYPE (type);
4401 wtype = va_list_type_node;
4403 /* Treat structure va_list types. */
4404 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4405 htype = TREE_TYPE (htype);
4406 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4408 /* If va_list is an array type, the argument may have decayed
4409 to a pointer type, e.g. by being passed to another function.
4410 In that case, unwrap both types so that we can compare the
4411 underlying records. */
4412 if (TREE_CODE (htype) == ARRAY_TYPE
4413 || POINTER_TYPE_P (htype))
4415 wtype = TREE_TYPE (wtype);
4416 htype = TREE_TYPE (htype);
4419 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4420 return va_list_type_node;
4425 /* The "standard" implementation of va_start: just assign `nextarg' to
/* Expand VALIST for writing and store NEXTARG into it.  */
4429 std_expand_builtin_va_start (tree valist, rtx nextarg)
4431 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4432 convert_move (va_r, nextarg, 0);
4435 /* Expand EXP, a call to __builtin_va_start. */
/* Validates the argument count, stabilizes the va_list operand, and
   dispatches to the target hook (or the standard expander when the
   hook is absent).  */
4438 expand_builtin_va_start (tree exp)
4442 location_t loc = EXPR_LOCATION (exp);
4444 if (call_expr_nargs (exp) < 2)
4446 error_at (loc, "too few arguments to function %<va_start%>");
/* fold_builtin_next_arg reports its own diagnostics on failure.  */
4450 if (fold_builtin_next_arg (exp, true))
4453 nextarg = expand_builtin_next_arg ();
4454 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4456 if (targetm.expand_builtin_va_start)
4457 targetm.expand_builtin_va_start (valist, nextarg);
4459 std_expand_builtin_va_start (valist, nextarg);
4464 /* The "standard" implementation of va_arg: read the value from the
4465 current (padded) address and increment by the (padded) size. */
/* Gimplifies a va_arg read for args-grow-up targets: dynamically
   aligns the va_list pointer when the argument needs more than
   PARM_BOUNDARY alignment, bumps the pointer by the rounded argument
   size, and returns a dereference of the (possibly pad-adjusted)
   original address.  Pre-queue statements go to PRE_P.  */
4468 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4471 tree addr, t, type_size, rounded_size, valist_tmp;
4472 unsigned HOST_WIDE_INT align, boundary;
4475 #ifdef ARGS_GROW_DOWNWARD
4476 /* All of the alignment and movement below is for args-grow-up machines.
4477 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4478 implement their own specialized gimplify_va_arg_expr routines. */
/* Arguments passed by invisible reference are read through a pointer;
   rewrite TYPE accordingly.  */
4482 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4484 type = build_pointer_type (type);
4486 align = PARM_BOUNDARY / BITS_PER_UNIT;
4487 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4489 /* When we align parameter on stack for caller, if the parameter
4490 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4491 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4492 here with caller. */
4493 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4494 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4496 boundary /= BITS_PER_UNIT;
4498 /* Hoist the valist value into a temporary for the moment. */
4499 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4501 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4502 requires greater alignment, we must perform dynamic alignment. */
4503 if (boundary > align
4504 && !integer_zerop (TYPE_SIZE (type)))
/* valist_tmp = (valist_tmp + boundary-1) & -boundary, done in two
   gimplified assignments (add, then mask in sizetype).  */
4506 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4507 fold_build2 (POINTER_PLUS_EXPR,
4509 valist_tmp, size_int (boundary - 1)));
4510 gimplify_and_add (t, pre_p);
4512 t = fold_convert (sizetype, valist_tmp);
4513 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4514 fold_convert (TREE_TYPE (valist),
4515 fold_build2 (BIT_AND_EXPR, sizetype, t,
4516 size_int (-boundary))));
4517 gimplify_and_add (t, pre_p);
4522 /* If the actual alignment is less than the alignment of the type,
4523 adjust the type accordingly so that we don't assume strict alignment
4524 when dereferencing the pointer. */
4525 boundary *= BITS_PER_UNIT;
4526 if (boundary < TYPE_ALIGN (type))
4528 type = build_variant_type_copy (type);
4529 TYPE_ALIGN (type) = boundary;
4532 /* Compute the rounded size of the type. */
4533 type_size = size_in_bytes (type);
4534 rounded_size = round_up (type_size, align);
4536 /* Reduce rounded_size so it's sharable with the postqueue. */
4537 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4541 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4543 /* Small args are padded downward. */
/* addr += (rounded_size > align) ? 0 : rounded_size - type_size,
   so a small argument is read from the top of its padded slot.  */
4544 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4545 rounded_size, size_int (align));
4546 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4547 size_binop (MINUS_EXPR, rounded_size, type_size));
4548 addr = fold_build2 (POINTER_PLUS_EXPR,
4549 TREE_TYPE (addr), addr, t);
4552 /* Compute new value for AP. */
4553 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4554 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4555 gimplify_and_add (t, pre_p);
4557 addr = fold_convert (build_pointer_type (type), addr);
/* For by-reference arguments, dereference once more to reach the
   actual value.  */
4560 addr = build_va_arg_indirect_ref (addr);
4562 return build_va_arg_indirect_ref (addr);
4565 /* Build an indirect-ref expression over the given TREE, which represents a
4566 piece of a va_arg() expansion. */
4568 build_va_arg_indirect_ref (tree addr)
4570 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
/* Mudflap must not instrument the dereference generated for va_arg;
   the marking code itself is elided from this excerpt.  */
4572 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4578 /* Return a dummy expression of type TYPE in order to keep going after an
/* Builds *(TYPE *)0 — a placeholder so later code sees the right
   type/mode after an error has been reported.  */
4582 dummy_object (tree type)
4584 tree t = build_int_cst (build_pointer_type (type), 0);
4585 return build1 (INDIRECT_REF, type, t);
4588 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4589 builtin function, but a very special sort of operator. */
/* Validates the va_list operand's type, warns about types that are
   promoted through `...' (replacing the read with a trap in that
   case), stabilizes the va_list, and delegates to the target's
   gimplify_va_arg_expr hook.  */
4591 enum gimplify_status
4592 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4594 tree promoted_type, have_va_type;
4595 tree valist = TREE_OPERAND (*expr_p, 0);
4596 tree type = TREE_TYPE (*expr_p);
4598 location_t loc = EXPR_LOCATION (*expr_p);
4600 /* Verify that valist is of the proper type. */
4601 have_va_type = TREE_TYPE (valist);
4602 if (have_va_type == error_mark_node)
4604 have_va_type = targetm.canonical_va_list_type (have_va_type);
4606 if (have_va_type == NULL_TREE)
4608 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4612 /* Generate a diagnostic for requesting data of a type that cannot
4613 be passed through `...' due to type promotion at the call site. */
4614 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
/* gave_help is static: the "so you should pass ..." hint is printed
   at most once per compilation.  */
4617 static bool gave_help;
4620 /* Unfortunately, this is merely undefined, rather than a constraint
4621 violation, so we cannot make this an error. If this call is never
4622 executed, the program is still strictly conforming. */
4623 warned = warning_at (loc, 0,
4624 "%qT is promoted to %qT when passed through %<...%>",
4625 type, promoted_type);
4626 if (!gave_help && warned)
4629 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4630 promoted_type, type);
4633 /* We can, however, treat "undefined" any way we please.
4634 Call abort to encourage the user to fix the program. */
4636 inform (loc, "if this code is reached, the program will abort");
4637 /* Before the abort, allow the evaluation of the va_list
4638 expression to exit or longjmp. */
4639 gimplify_and_add (valist, pre_p);
4640 t = build_call_expr_loc (loc,
4641 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4642 gimplify_and_add (t, pre_p);
4644 /* This is dead code, but go ahead and finish so that the
4645 mode of the result comes out right. */
4646 *expr_p = dummy_object (type);
4651 /* Make it easier for the backends by protecting the valist argument
4652 from multiple evaluations. */
4653 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4655 /* For this case, the backends will be expecting a pointer to
4656 TREE_TYPE (abi), but it's possible we've
4657 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4659 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4661 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4662 valist = fold_convert_loc (loc, p1,
4663 build_fold_addr_expr_loc (loc, valist));
/* Array va_list: gimplify to an rvalue pointer; otherwise (elided
   else-arm) an lvalue is required.  */
4666 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4669 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4671 if (!targetm.gimplify_va_arg_expr)
4672 /* FIXME: Once most targets are converted we should merely
4673 assert this is non-null. */
4676 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4681 /* Expand EXP, a call to __builtin_va_end. */
/* va_end itself generates no code; only the operand's side effects
   (if any) are evaluated.  */
4684 expand_builtin_va_end (tree exp)
4686 tree valist = CALL_EXPR_ARG (exp, 0);
4688 /* Evaluate for side effects, if needed. I hate macros that don't
4690 if (TREE_SIDE_EFFECTS (valist))
4691 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4696 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4697 builtin rather than just as an assignment in stdarg.h because of the
4698 nastiness of array-type va_list types. */
/* Scalar va_lists copy with a simple assignment; array-type va_lists
   copy with a BLKmode block move between the two objects.  */
4701 expand_builtin_va_copy (tree exp)
4704 location_t loc = EXPR_LOCATION (exp);
4706 dst = CALL_EXPR_ARG (exp, 0);
4707 src = CALL_EXPR_ARG (exp, 1);
/* Destination needs an lvalue (1); source only an rvalue (0).  */
4709 dst = stabilize_va_list_loc (loc, dst, 1);
4710 src = stabilize_va_list_loc (loc, src, 0);
4712 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4714 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4716 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4717 TREE_SIDE_EFFECTS (t) = 1;
4718 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4722 rtx dstb, srcb, size;
4724 /* Evaluate to pointers. */
4725 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4726 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4727 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4728 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4730 dstb = convert_memory_address (Pmode, dstb);
4731 srcb = convert_memory_address (Pmode, srcb);
4733 /* "Dereference" to BLKmode memories. */
4734 dstb = gen_rtx_MEM (BLKmode, dstb);
4735 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4736 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4737 srcb = gen_rtx_MEM (BLKmode, srcb);
4738 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4739 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4742 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4748 /* Expand a call to one of the builtin functions __builtin_frame_address or
4749 __builtin_return_address. */
/* FNDECL distinguishes the two builtins for diagnostics and for the
   final result handling.  A failed expansion (unsupported argument)
   warns and, presumably, yields const0_rtx — the elided lines hide
   the exact fallback.  */
4752 expand_builtin_frame_address (tree fndecl, tree exp)
4754 /* The argument must be a nonnegative integer constant.
4755 It counts the number of frames to scan up the stack.
4756 The value is the return address saved in that frame. */
4757 if (call_expr_nargs (exp) == 0)
4758 /* Warning about missing arg was already issued. */
4760 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4762 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4763 error ("invalid argument to %<__builtin_frame_address%>");
4765 error ("invalid argument to %<__builtin_return_address%>");
4771 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4772 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4774 /* Some ports cannot access arbitrary stack frames. */
4777 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4778 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4780 warning (0, "unsupported argument to %<__builtin_return_address%>");
4784 /* For __builtin_frame_address, return what we've got. */
4785 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
/* Non-constant result must live in a register before use.  */
4789 && ! CONSTANT_P (tem))
4790 tem = copy_to_mode_reg (Pmode, tem);
4795 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4796 we failed and the caller should emit a normal call, otherwise try to get
4797 the result in TARGET, if convenient. */
4800 expand_builtin_alloca (tree exp, rtx target)
4805 /* Emit normal call if marked not-inlineable. */
4806 if (CALL_CANNOT_INLINE_P (exp))
4809 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4812 /* Compute the argument. */
4813 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4815 /* Allocate the desired space. */
4816 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
/* The stack pointer is Pmode; callers expect a ptr_mode value.  */
4817 result = convert_memory_address (ptr_mode, result);
4822 /* Expand a call to a bswap builtin with argument ARG0. MODE
4823 is the mode to expand with. */
4826 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4828 enum machine_mode mode;
4832 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4835 arg = CALL_EXPR_ARG (exp, 0);
4836 mode = TYPE_MODE (TREE_TYPE (arg));
4837 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4839 target = expand_unop (mode, bswap_optab, op0, target, 1);
4841 gcc_assert (target);
/* expand_unop may hand back a wider mode; normalize to MODE.  */
4843 return convert_to_mode (mode, target, 0);
4846 /* Expand a call to a unary builtin in EXP.
4847 Return NULL_RTX if a normal call should be emitted rather than expanding the
4848 function in-line. If convenient, the result should be placed in TARGET.
4849 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* Generic driver for unary integer builtins (ffs/clz/ctz/...):
   OP_OPTAB names the operation to expand.  */
4852 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4853 rtx subtarget, optab op_optab)
4857 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4860 /* Compute the argument. */
4861 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4862 VOIDmode, EXPAND_NORMAL);
4863 /* Compute op, into TARGET if possible.
4864 Set TARGET to wherever the result comes back. */
4865 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4866 op_optab, op0, target, 1);
4867 gcc_assert (target);
4869 return convert_to_mode (target_mode, target, 0);
4872 /* Expand a call to __builtin_expect. We just return our argument
4873 as the builtin_expect semantic should've been already executed by
4874 tree branch prediction pass. */
4877 expand_builtin_expect (tree exp, rtx target)
4881 if (call_expr_nargs (exp) < 2)
4883 arg = CALL_EXPR_ARG (exp, 0);
4885 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4886 /* When guessing was done, the hints should be already stripped away. */
/* Reaching RTL expansion with a live __builtin_expect is only valid
   when branch prediction did not run (or after errors).  */
4887 gcc_assert (!flag_guess_branch_prob
4888 || optimize == 0 || errorcount || sorrycount);
/* Emit a trap: the machine "trap" insn when available, otherwise a
   noreturn library call to abort.  */
4893 expand_builtin_trap (void)
4897 emit_insn (gen_trap ())
4900 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4904 /* Expand a call to __builtin_unreachable. We do nothing except emit
4905 a barrier saying that control flow will not pass here.
4907 It is the responsibility of the program being compiled to ensure
4908 that control flow does never reach __builtin_unreachable. */
4910 expand_builtin_unreachable (void)
4915 /* Expand EXP, a call to fabs, fabsf or fabsl.
4916 Return NULL_RTX if a normal call should be emitted rather than expanding
4917 the function inline. If convenient, the result should be placed
4918 in TARGET. SUBTARGET may be used as the target for computing
4922 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4924 enum machine_mode mode;
4928 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4931 arg = CALL_EXPR_ARG (exp, 0);
/* Save the argument back into the CALL_EXPR so it is evaluated only
   once even if the expansion references it twice.  */
4932 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4933 mode = TYPE_MODE (TREE_TYPE (arg));
4934 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4935 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4938 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4939 Return NULL is a normal call should be emitted rather than expanding the
4940 function inline. If convenient, the result should be placed in TARGET.
4941 SUBTARGET may be used as the target for computing the operand. */
4944 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4949 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4952 arg = CALL_EXPR_ARG (exp, 0)
4953 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4955 arg = CALL_EXPR_ARG (exp, 1);
4956 op1 = expand_normal (arg);
/* expand_copysign handles sign-bit extraction/insertion per format.  */
4958 return expand_copysign (op0, op1, target);
4961 /* Create a new constant string literal and return a char* pointer to it.
4962 The STRING_CST value is the LEN characters at STR. */
4964 build_string_literal (int len, const char *str)
4966 tree t, elem, index, type;
4968 t = build_string (len, str);
/* Element type is `const char'; array type is const char[LEN].  */
4969 elem = build_type_variant (char_type_node, 1, 0);
4970 index = build_index_type (size_int (len - 1));
4971 type = build_array_type (elem, index);
4972 TREE_TYPE (t) = type;
4973 TREE_CONSTANT (t) = 1;
4974 TREE_READONLY (t) = 1;
4975 TREE_STATIC (t) = 1;
/* Return &str[0] as a const char* ADDR_EXPR.  */
4977 type = build_pointer_type (elem);
4978 t = build1 (ADDR_EXPR, type,
4979 build4 (ARRAY_REF, elem,
4980 t, integer_zero_node, NULL_TREE, NULL_TREE));
4984 /* Expand a call to either the entry or exit function profiler. */
/* Calls __cyg_profile-style libfuncs with (this_fn_address,
   call_site_return_address); EXITP selects the exit variant.  */
4987 expand_builtin_profile_func (bool exitp)
4989 rtx this_rtx, which;
4991 this_rtx = DECL_RTL (current_function_decl);
4992 gcc_assert (MEM_P (this_rtx));
/* DECL_RTL of a function is a MEM around its symbol; strip the MEM
   to pass the bare address.  */
4993 this_rtx = XEXP (this_rtx, 0);
4996 which = profile_function_exit_libfunc;
4998 which = profile_function_entry_libfunc;
5000 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5001 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5008 /* Expand a call to __builtin___clear_cache. */
/* Three configurations: no insn but CLEAR_INSN_CACHE (fall back to the
   libgcc call), neither (no-op), or a "clear_cache" insn (expand it
   inline — a library call here could recurse through libgcc).  */
5011 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5013 #ifndef HAVE_clear_cache
5014 #ifdef CLEAR_INSN_CACHE
5015 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5016 does something. Just do the default expansion to a call to
5020 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5021 does nothing. There is no need to call it. Do nothing. */
5023 #endif /* CLEAR_INSN_CACHE */
5025 /* We have a "clear_cache" insn, and it will handle everything. */
5027 rtx begin_rtx, end_rtx;
5028 enum insn_code icode;
5030 /* We must not expand to a library call. If we did, any
5031 fallback library function in libgcc that might contain a call to
5032 __builtin___clear_cache() would recurse infinitely. */
5033 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5035 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5039 if (HAVE_clear_cache)
5041 icode = CODE_FOR_clear_cache;
/* Force each operand through the insn's predicate, copying to a
   register when the raw rtx is not accepted.  */
5043 begin = CALL_EXPR_ARG (exp, 0);
5044 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5045 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5046 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5047 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5049 end = CALL_EXPR_ARG (exp, 1);
5050 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5051 end_rtx = convert_memory_address (Pmode, end_rtx);
5052 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5053 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5055 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5058 #endif /* HAVE_clear_cache */
5061 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5064 round_trampoline_addr (rtx tramp)
5066 rtx temp, addend, mask;
5068 /* If we don't need too much alignment, we'll have been guaranteed
5069 proper alignment by get_trampoline_type. */
5070 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5073 /* Round address up to desired boundary. */
/* tramp = (tramp + align-1) & -align, in byte units.  */
5074 temp = gen_reg_rtx (Pmode);
5075 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5076 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5078 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5079 temp, 0, OPTAB_LIB_WIDEN);
5080 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5081 temp, 0, OPTAB_LIB_WIDEN);
/* Expand __builtin_init_trampoline: materialize the trampoline memory,
   align it, and let the target hook write the actual trampoline code
   for function T_FUNC with static chain T_CHAIN.  */
5087 expand_builtin_init_trampoline (tree exp)
5089 tree t_tramp, t_func, t_chain;
5090 rtx m_tramp, r_tramp, r_chain, tmp;
5092 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5093 POINTER_TYPE, VOID_TYPE))
5096 t_tramp = CALL_EXPR_ARG (exp, 0);
5097 t_func = CALL_EXPR_ARG (exp, 1);
5098 t_chain = CALL_EXPR_ARG (exp, 2);
5100 r_tramp = expand_normal (t_tramp);
5101 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5102 MEM_NOTRAP_P (m_tramp) = 1;
5104 /* The TRAMP argument should be the address of a field within the
5105 local function's FRAME decl. Let's see if we can fill in the
5106 to fill in the MEM_ATTRs for this memory. */
5107 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5108 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
/* If rounding moved the address, rebuild the MEM with the aligned
   address and the alignment/size attributes the rounding guarantees.  */
5111 tmp = round_trampoline_addr (r_tramp);
5114 m_tramp = change_address (m_tramp, BLKmode, tmp);
5115 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5116 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5119 /* The FUNC argument should be the address of the nested function.
5120 Extract the actual function decl to pass to the hook. */
5121 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5122 t_func = TREE_OPERAND (t_func, 0);
5123 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5125 r_chain = expand_normal (t_chain);
5127 /* Generate insns to initialize the trampoline. */
5128 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5130 trampolines_created = 1;
/* Expand __builtin_adjust_trampoline: round the trampoline address to
   TRAMPOLINE_ALIGNMENT and apply the optional target adjustment hook.  */
5135 expand_builtin_adjust_trampoline (tree exp)
5139 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5142 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5143 tramp = round_trampoline_addr (tramp);
5144 if (targetm.calls.trampoline_adjust_address)
5145 tramp = targetm.calls.trampoline_adjust_address (tramp);
5150 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5151 function. The function first checks whether the back end provides
5152 an insn to implement signbit for the respective mode. If not, it
5153 checks whether the floating point format of the value is such that
5154 the sign bit can be extracted. If that is not the case, the
5155 function returns NULL_RTX to indicate that a normal call should be
5156 emitted rather than expanding the function in-line. EXP is the
5157 expression that is a call to the builtin function; if convenient,
5158 the result should be placed in TARGET. */
5160 expand_builtin_signbit (tree exp, rtx target)
5162 const struct real_format *fmt;
5163 enum machine_mode fmode, imode, rmode;
5164 HOST_WIDE_INT hi, lo;
5167 enum insn_code icode;
5169 location_t loc = EXPR_LOCATION (exp);
5171 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5174 arg = CALL_EXPR_ARG (exp, 0);
5175 fmode = TYPE_MODE (TREE_TYPE (arg));
5176 rmode = TYPE_MODE (TREE_TYPE (exp));
5177 fmt = REAL_MODE_FORMAT (fmode);
5179 arg = builtin_save_expr (arg);
5181 /* Expand the argument yielding a RTX expression. */
5182 temp = expand_normal (arg);
5184 /* Check if the back end provides an insn that handles signbit for the
5186 icode = signbit_optab->handlers [(int) fmode].insn_code;
5187 if (icode != CODE_FOR_nothing)
5189 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5190 emit_unop_insn (icode, target, temp, UNKNOWN)
5194 /* For floating point formats without a sign bit, implement signbit
5196 bitpos = fmt->signbit_ro;
5199 /* But we can't do this if the format supports signed zero. */
5200 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
/* Fallback for sign-bit-less formats: signbit(x) == (x < 0).  */
5203 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5204 build_real (TREE_TYPE (arg), dconst0));
5205 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
/* Word-or-smaller values: view the float bits as an integer mode.  */
5208 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5210 imode = int_mode_for_mode (fmode);
5211 if (imode == BLKmode)
5213 temp = gen_lowpart (imode, temp);
5218 /* Handle targets with different FP word orders. */
5219 if (FLOAT_WORDS_BIG_ENDIAN)
5220 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5222 word = bitpos / BITS_PER_WORD;
5223 temp = operand_subword_force (temp, word, fmode);
5224 bitpos = bitpos % BITS_PER_WORD;
5227 /* Force the intermediate word_mode (or narrower) result into a
5228 register. This avoids attempting to create paradoxical SUBREGs
5229 of floating point modes below. */
5230 temp = force_reg (imode, temp);
5232 /* If the bitpos is within the "result mode" lowpart, the operation
5233 can be implement with a single bitwise AND. Otherwise, we need
5234 a right shift and an AND. */
5236 if (bitpos < GET_MODE_BITSIZE (rmode))
/* Build a double-word constant with only the sign bit set.  */
5238 if (bitpos < HOST_BITS_PER_WIDE_INT)
5241 lo = (HOST_WIDE_INT) 1 << bitpos;
5245 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5249 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5250 temp = gen_lowpart (rmode, temp);
5251 temp = expand_binop (rmode, and_optab, temp,
5252 immed_double_const (lo, hi, rmode),
5253 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5257 /* Perform a logical right shift to place the signbit in the least
5258 significant bit, then truncate the result to the desired mode
5259 and mask just this bit. */
5260 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5261 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5262 temp = gen_lowpart (rmode, temp);
5263 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5264 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5270 /* Expand fork or exec calls. TARGET is the desired target of the
5271 call. EXP is the call. FN is the
5272 identificator of the actual function. IGNORE is nonzero if the
5273 value is to be ignored. */
/* Under -fprofile-arcs, redirect fork/exec* to the libgcov __gcov_*
   wrappers so profile counters are flushed across fork/exec.  */
5276 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5281 /* If we are not profiling, just call the function. */
5282 if (!profile_arc_flag)
5285 /* Otherwise call the wrapper. This should be equivalent for the rest of
5286 compiler, so the code does not diverge, and the wrapper may run the
5287 code necessary for keeping the profiling sane. */
5289 switch (DECL_FUNCTION_CODE (fn))
5292 id = get_identifier ("__gcov_fork");
5295 case BUILT_IN_EXECL:
5296 id = get_identifier ("__gcov_execl");
5299 case BUILT_IN_EXECV:
5300 id = get_identifier ("__gcov_execv");
5303 case BUILT_IN_EXECLP:
5304 id = get_identifier ("__gcov_execlp");
5307 case BUILT_IN_EXECLE:
5308 id = get_identifier ("__gcov_execle");
5311 case BUILT_IN_EXECVP:
5312 id = get_identifier ("__gcov_execvp");
5315 case BUILT_IN_EXECVE:
5316 id = get_identifier ("__gcov_execve");
/* Synthesize an extern public decl for the wrapper, sharing the
   original function's type, then re-target the call to it.  */
5323 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5324 FUNCTION_DECL, id, TREE_TYPE (fn));
5325 DECL_EXTERNAL (decl) = 1;
5326 TREE_PUBLIC (decl) = 1;
5327 DECL_ARTIFICIAL (decl) = 1;
5328 TREE_NOTHROW (decl) = 1;
5329 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5330 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5331 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5332 return expand_call (call, target, ignore);
5337 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5338 the pointer in these functions is void*, the tree optimizers may remove
5339 casts. The mode computed in expand_builtin isn't reliable either, due
5340 to __sync_bool_compare_and_swap.
5342 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5343 group of builtins. This gives us log2 of the mode size. */
5345 static inline enum machine_mode
5346 get_builtin_sync_mode (int fcode_diff)
5348 /* The size is not negotiable, so ask not to get BLKmode in return
5349 if the target indicates that a smaller size would be better. */
/* 8 << fcode_diff bits: FOO_1 -> QImode-sized, FOO_2 -> HImode, etc.  */
5350 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5353 /* Expand the memory expression LOC and return the appropriate memory operand
5354 for the builtin_sync operations. */
5357 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5361 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5362 addr = convert_memory_address (Pmode, addr);
5364 /* Note that we explicitly do not want any alias information for this
5365 memory, so that we kill all other live memories. Otherwise we don't
5366 satisfy the full barrier semantics of the intrinsic. */
5367 mem = validize_mem (gen_rtx_MEM (mode, addr));
/* Barrier alias set + volatile marking keep the access from being
   reordered or merged with surrounding memory operations.  */
5369 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5370 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5371 MEM_VOLATILE_P (mem) = 1;
5376 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5377 EXP is the CALL_EXPR. CODE is the rtx code
5378 that corresponds to the arithmetic or logical operation from the name;
5379 an exception here is that NOT actually means NAND. TARGET is an optional
5380 place for us to store the results; AFTER is true if this is the
5381 fetch_and_xxx form. IGNORE is true if we don't actually care about
5382 the result of the operation at all. */
5385 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5386 enum rtx_code code, bool after,
5387 rtx target, bool ignore)
5390 enum machine_mode old_mode;
5391 location_t loc = EXPR_LOCATION (exp);
/* __sync_*nand* changed semantics in GCC 4.4 (from ~(a & b) stores to
   the documented NAND); warn once per direction under -Wsync-nand.  */
5393 if (code == NOT && warn_sync_nand)
5395 tree fndecl = get_callee_fndecl (exp);
5396 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5398 static bool warned_f_a_n, warned_n_a_f;
5402 case BUILT_IN_FETCH_AND_NAND_1:
5403 case BUILT_IN_FETCH_AND_NAND_2:
5404 case BUILT_IN_FETCH_AND_NAND_4:
5405 case BUILT_IN_FETCH_AND_NAND_8:
5406 case BUILT_IN_FETCH_AND_NAND_16:
5411 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5412 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5413 warned_f_a_n = true;
5416 case BUILT_IN_NAND_AND_FETCH_1:
5417 case BUILT_IN_NAND_AND_FETCH_2:
5418 case BUILT_IN_NAND_AND_FETCH_4:
5419 case BUILT_IN_NAND_AND_FETCH_8:
5420 case BUILT_IN_NAND_AND_FETCH_16:
5425 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5426 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5427 warned_n_a_f = true;
5435 /* Expand the operands. */
5436 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5438 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5439 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5440 of CONST_INTs, where we know the old_mode only from the call argument. */
5441 old_mode = GET_MODE (val);
5442 if (old_mode == VOIDmode)
5443 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5444 val = convert_modes (mode, old_mode, val, 1);
/* When the result is unused, the cheaper value-less form suffices.  */
5447 return expand_sync_operation (mem, val, code);
5449 return expand_sync_fetch_operation (mem, val, code, after, target);
5452 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5453 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5454 true if this is the boolean form. TARGET is a place for us to store the
5455 results; this is NOT optional if IS_BOOL is true. */
5458 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5459 bool is_bool, rtx target)
5461 rtx old_val, new_val, mem;
5462 enum machine_mode old_mode;
5464 /* Expand the operands. */
5465 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5468 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5469 mode, EXPAND_NORMAL);
5470 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5471 of CONST_INTs, where we know the old_mode only from the call argument. */
5472 old_mode = GET_MODE (old_val);
5473 if (old_mode == VOIDmode)
5474 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5475 old_val = convert_modes (mode, old_mode, old_val, 1);
5477 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5478 mode, EXPAND_NORMAL);
5479 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5480 of CONST_INTs, where we know the old_mode only from the call argument. */
5481 old_mode = GET_MODE (new_val);
5482 if (old_mode == VOIDmode)
5483 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5484 new_val = convert_modes (mode, old_mode, new_val, 1);
/* Dispatch on IS_BOOL: success flag vs. previous memory value.  */
5487 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5489 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5492 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5493    general form is actually an atomic exchange, and some targets only
5494    support a reduced form with the second argument being a constant 1.
5495    EXP is the CALL_EXPR; TARGET is an optional place for us to store
/* (review: trailing words of this comment — "the results." — and the
   return-type line were elided from the listing.)  */
5499 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5503 enum machine_mode old_mode;
5505 /* Expand the operands. */
/* Arg 0: the lock location; arg 1: the value to store (often 1).  */
5506 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5507 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5508 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5509 of CONST_INTs, where we know the old_mode only from the call argument. */
5510 old_mode = GET_MODE (val);
5511 if (old_mode == VOIDmode)
5512 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5513 val = convert_modes (mode, old_mode, val, 1);
5515 return expand_sync_lock_test_and_set (mem, val, target)
5518 /* Expand the __sync_synchronize intrinsic. */
/* Emits a full memory barrier.  Strategy, in order of preference:
   (1) a target memory_barrier insn, (2) a __sync_synchronize libcall,
   (3) a volatile asm with a "memory" clobber as a compiler barrier.  */
5521 expand_builtin_synchronize (void)
5524 VEC (tree, gc) *v_clobbers;
5526 #ifdef HAVE_memory_barrier
5527 if (HAVE_memory_barrier)
5529 emit_insn (gen_memory_barrier ());
/* If the target provides a library routine, call that instead.  */
5534 if (synchronize_libfunc != NULL_RTX)
5536 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5540 /* If no explicit memory barrier instruction is available, create an
5541 empty asm stmt with a memory clobber. */
5542 v_clobbers = VEC_alloc (tree, gc, 1);
5543 VEC_quick_push (tree, v_clobbers,
5544 tree_cons (NULL, build_string (6, "memory"), NULL))
5545 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
/* Volatile so the asm (and hence the barrier) is never deleted.  */
5546 gimple_asm_set_volatile (x, true);
5547 expand_asm_stmt (x);
5550 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
/* Stores zero to the lock with release semantics.  Uses the target's
   sync_lock_release pattern when present, otherwise falls back to a
   full barrier followed by a plain store of zero.  */
5553 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5555 enum insn_code icode;
/* The release value is always the constant zero.  */
5557 rtx val = const0_rtx;
5559 /* Expand the operands. */
5560 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5562 /* If there is an explicit operation in the md file, use it. */
5563 icode = sync_lock_release[mode];
5564 if (icode != CODE_FOR_nothing)
/* Force VAL into a register if the insn predicate rejects const0_rtx.  */
5566 if (!insn_data[icode].operand[1].predicate (val, mode))
5567 val = force_reg (mode, val);
5569 insn = GEN_FCN (icode) (mem, val);
5577 /* Otherwise we can implement this operation by emitting a barrier
5578 followed by a store of zero. */
5579 expand_builtin_synchronize ();
5580 emit_move_insn (mem, val);
5583 /* Expand an expression EXP that calls a built-in function,
5584 with result going to TARGET if that's convenient
5585 (and in mode MODE if that's convenient).
5586 SUBTARGET may be used as the target for computing one of EXP's operands.
5587 IGNORE is nonzero if the value is to be ignored. */
/* NOTE(review): central dispatcher for all builtin expansion.  Each case
   either returns an RTX result directly, sets TARGET and (in the elided
   lines) breaks/returns, or falls through to the library call at the end.
   Many `break;`/brace lines were elided from this listing — the code
   tokens below are kept verbatim.  */
5590 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5593 tree fndecl = get_callee_fndecl (exp);
5594 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5595 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
/* Machine-specific builtins are handled entirely by the backend hook.  */
5597 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5598 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5600 /* When not optimizing, generate calls to library functions for a certain
5603 && !called_as_built_in (fndecl)
5604 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5605 && fcode != BUILT_IN_ALLOCA
5606 && fcode != BUILT_IN_FREE)
5607 return expand_call (exp, target, ignore);
5609 /* The built-in function expanders test for target == const0_rtx
5610 to determine whether the function's result will be ignored. */
5612 target = const0_rtx;
5614 /* If the result of a pure or const built-in function is ignored, and
5615 none of its arguments are volatile, we can avoid expanding the
5616 built-in call and just evaluate the arguments for side-effects. */
5617 if (target == const0_rtx
5618 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5620 bool volatilep = false;
5622 call_expr_arg_iterator iter;
5624 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5625 if (TREE_THIS_VOLATILE (arg))
5633 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5634 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* --- Math builtins --- */
5641 CASE_FLT_FN (BUILT_IN_FABS):
5642 target = expand_builtin_fabs (exp, target, subtarget);
5647 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5648 target = expand_builtin_copysign (exp, target, subtarget);
5653 /* Just do a normal library call if we were unable to fold
5655 CASE_FLT_FN (BUILT_IN_CABS):
5658 CASE_FLT_FN (BUILT_IN_EXP):
5659 CASE_FLT_FN (BUILT_IN_EXP10):
5660 CASE_FLT_FN (BUILT_IN_POW10):
5661 CASE_FLT_FN (BUILT_IN_EXP2):
5662 CASE_FLT_FN (BUILT_IN_EXPM1):
5663 CASE_FLT_FN (BUILT_IN_LOGB):
5664 CASE_FLT_FN (BUILT_IN_LOG):
5665 CASE_FLT_FN (BUILT_IN_LOG10):
5666 CASE_FLT_FN (BUILT_IN_LOG2):
5667 CASE_FLT_FN (BUILT_IN_LOG1P):
5668 CASE_FLT_FN (BUILT_IN_TAN):
5669 CASE_FLT_FN (BUILT_IN_ASIN):
5670 CASE_FLT_FN (BUILT_IN_ACOS):
5671 CASE_FLT_FN (BUILT_IN_ATAN):
5672 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5673 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5674 because of possible accuracy problems. */
5675 if (! flag_unsafe_math_optimizations)
5677 CASE_FLT_FN (BUILT_IN_SQRT):
5678 CASE_FLT_FN (BUILT_IN_FLOOR):
5679 CASE_FLT_FN (BUILT_IN_CEIL):
5680 CASE_FLT_FN (BUILT_IN_TRUNC):
5681 CASE_FLT_FN (BUILT_IN_ROUND):
5682 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5683 CASE_FLT_FN (BUILT_IN_RINT):
5684 target = expand_builtin_mathfn (exp, target, subtarget);
5689 CASE_FLT_FN (BUILT_IN_ILOGB):
5690 if (! flag_unsafe_math_optimizations)
5692 CASE_FLT_FN (BUILT_IN_ISINF):
5693 CASE_FLT_FN (BUILT_IN_FINITE):
5694 case BUILT_IN_ISFINITE:
5695 case BUILT_IN_ISNORMAL:
5696 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5701 CASE_FLT_FN (BUILT_IN_LCEIL):
5702 CASE_FLT_FN (BUILT_IN_LLCEIL):
5703 CASE_FLT_FN (BUILT_IN_LFLOOR):
5704 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5705 target = expand_builtin_int_roundingfn (exp, target);
5710 CASE_FLT_FN (BUILT_IN_LRINT):
5711 CASE_FLT_FN (BUILT_IN_LLRINT):
5712 CASE_FLT_FN (BUILT_IN_LROUND):
5713 CASE_FLT_FN (BUILT_IN_LLROUND):
5714 target = expand_builtin_int_roundingfn_2 (exp, target);
5719 CASE_FLT_FN (BUILT_IN_POW):
5720 target = expand_builtin_pow (exp, target, subtarget);
5725 CASE_FLT_FN (BUILT_IN_POWI):
5726 target = expand_builtin_powi (exp, target, subtarget);
5731 CASE_FLT_FN (BUILT_IN_ATAN2):
5732 CASE_FLT_FN (BUILT_IN_LDEXP):
5733 CASE_FLT_FN (BUILT_IN_SCALB):
5734 CASE_FLT_FN (BUILT_IN_SCALBN):
5735 CASE_FLT_FN (BUILT_IN_SCALBLN):
5736 if (! flag_unsafe_math_optimizations)
5739 CASE_FLT_FN (BUILT_IN_FMOD):
5740 CASE_FLT_FN (BUILT_IN_REMAINDER):
5741 CASE_FLT_FN (BUILT_IN_DREM):
5742 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5747 CASE_FLT_FN (BUILT_IN_CEXPI):
5748 target = expand_builtin_cexpi (exp, target, subtarget);
/* cexpi expansion must always succeed, hence the assert.  */
5749 gcc_assert (target);
5752 CASE_FLT_FN (BUILT_IN_SIN):
5753 CASE_FLT_FN (BUILT_IN_COS):
5754 if (! flag_unsafe_math_optimizations)
5756 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5761 CASE_FLT_FN (BUILT_IN_SINCOS):
5762 if (! flag_unsafe_math_optimizations)
5764 target = expand_builtin_sincos (exp);
/* --- __builtin_apply machinery --- */
5769 case BUILT_IN_APPLY_ARGS:
5770 return expand_builtin_apply_args ();
5772 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5773 FUNCTION with a copy of the parameters described by
5774 ARGUMENTS, and ARGSIZE. It returns a block of memory
5775 allocated on the stack into which is stored all the registers
5776 that might possibly be used for returning the result of a
5777 function. ARGUMENTS is the value returned by
5778 __builtin_apply_args. ARGSIZE is the number of bytes of
5779 arguments that must be copied. ??? How should this value be
5780 computed? We'll also need a safe worst case value for varargs
5782 case BUILT_IN_APPLY:
5783 if (!validate_arglist (exp, POINTER_TYPE,
5784 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5785 && !validate_arglist (exp, REFERENCE_TYPE,
5786 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5792 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5793 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5794 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5796 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5799 /* __builtin_return (RESULT) causes the function to return the
5800 value described by RESULT. RESULT is address of the block of
5801 memory returned by __builtin_apply. */
5802 case BUILT_IN_RETURN:
5803 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5804 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5807 case BUILT_IN_SAVEREGS:
5808 return expand_builtin_saveregs ();
5810 case BUILT_IN_ARGS_INFO:
5811 return expand_builtin_args_info (exp);
5813 case BUILT_IN_VA_ARG_PACK:
5814 /* All valid uses of __builtin_va_arg_pack () are removed during
5816 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5819 case BUILT_IN_VA_ARG_PACK_LEN:
5820 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5822 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5825 /* Return the address of the first anonymous stack arg. */
5826 case BUILT_IN_NEXT_ARG:
5827 if (fold_builtin_next_arg (exp, false))
5829 return expand_builtin_next_arg ();
5831 case BUILT_IN_CLEAR_CACHE:
5832 target = expand_builtin___clear_cache (exp);
5837 case BUILT_IN_CLASSIFY_TYPE:
5838 return expand_builtin_classify_type (exp);
5840 case BUILT_IN_CONSTANT_P:
5843 case BUILT_IN_FRAME_ADDRESS:
5844 case BUILT_IN_RETURN_ADDRESS:
5845 return expand_builtin_frame_address (fndecl, exp);
5847 /* Returns the address of the area where the structure is returned.
5849 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5850 if (call_expr_nargs (exp) != 0
5851 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5852 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5855 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5857 case BUILT_IN_ALLOCA:
5858 target = expand_builtin_alloca (exp, target);
5863 case BUILT_IN_STACK_SAVE:
5864 return expand_stack_save ();
5866 case BUILT_IN_STACK_RESTORE:
5867 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5870 case BUILT_IN_BSWAP32:
5871 case BUILT_IN_BSWAP64:
5872 target = expand_builtin_bswap (exp, target, subtarget);
/* --- Bit-manipulation builtins, all via expand_builtin_unop --- */
5878 CASE_INT_FN (BUILT_IN_FFS):
5879 case BUILT_IN_FFSIMAX:
5880 target = expand_builtin_unop (target_mode, exp, target,
5881 subtarget, ffs_optab);
5886 CASE_INT_FN (BUILT_IN_CLZ):
5887 case BUILT_IN_CLZIMAX:
5888 target = expand_builtin_unop (target_mode, exp, target,
5889 subtarget, clz_optab);
5894 CASE_INT_FN (BUILT_IN_CTZ):
5895 case BUILT_IN_CTZIMAX:
5896 target = expand_builtin_unop (target_mode, exp, target,
5897 subtarget, ctz_optab);
5902 CASE_INT_FN (BUILT_IN_POPCOUNT):
5903 case BUILT_IN_POPCOUNTIMAX:
5904 target = expand_builtin_unop (target_mode, exp, target,
5905 subtarget, popcount_optab);
5910 CASE_INT_FN (BUILT_IN_PARITY):
5911 case BUILT_IN_PARITYIMAX:
5912 target = expand_builtin_unop (target_mode, exp, target,
5913 subtarget, parity_optab);
/* --- String and memory builtins --- */
5918 case BUILT_IN_STRLEN:
5919 target = expand_builtin_strlen (exp, target, target_mode);
5924 case BUILT_IN_STRCPY:
5925 target = expand_builtin_strcpy (exp, target);
5930 case BUILT_IN_STRNCPY:
5931 target = expand_builtin_strncpy (exp, target);
5936 case BUILT_IN_STPCPY:
5937 target = expand_builtin_stpcpy (exp, target, mode);
5942 case BUILT_IN_MEMCPY:
5943 target = expand_builtin_memcpy (exp, target);
5948 case BUILT_IN_MEMPCPY:
5949 target = expand_builtin_mempcpy (exp, target, mode);
5954 case BUILT_IN_MEMSET:
5955 target = expand_builtin_memset (exp, target, mode);
5960 case BUILT_IN_BZERO:
5961 target = expand_builtin_bzero (exp);
5966 case BUILT_IN_STRCMP:
5967 target = expand_builtin_strcmp (exp, target);
5972 case BUILT_IN_STRNCMP:
5973 target = expand_builtin_strncmp (exp, target, mode);
5979 case BUILT_IN_MEMCMP:
5980 target = expand_builtin_memcmp (exp, target, mode);
/* --- setjmp/longjmp lowering builtins --- */
5985 case BUILT_IN_SETJMP:
5986 /* This should have been lowered to the builtins below. */
5989 case BUILT_IN_SETJMP_SETUP:
5990 /* __builtin_setjmp_setup is passed a pointer to an array of five words
5991 and the receiver label. */
5992 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5994 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5995 VOIDmode, EXPAND_NORMAL);
5996 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
5997 rtx label_r = label_rtx (label);
5999 /* This is copied from the handling of non-local gotos. */
6000 expand_builtin_setjmp_setup (buf_addr, label_r);
6001 nonlocal_goto_handler_labels
6002 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6003 nonlocal_goto_handler_labels);
6004 /* ??? Do not let expand_label treat us as such since we would
6005 not want to be both on the list of non-local labels and on
6006 the list of forced labels. */
6007 FORCED_LABEL (label) = 0;
6012 case BUILT_IN_SETJMP_DISPATCHER:
6013 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6014 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6016 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6017 rtx label_r = label_rtx (label);
6019 /* Remove the dispatcher label from the list of non-local labels
6020 since the receiver labels have been added to it above. */
6021 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6026 case BUILT_IN_SETJMP_RECEIVER:
6027 /* __builtin_setjmp_receiver is passed the receiver label. */
6028 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6030 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6031 rtx label_r = label_rtx (label);
6033 expand_builtin_setjmp_receiver (label_r);
6038 /* __builtin_longjmp is passed a pointer to an array of five words.
6039 It's similar to the C library longjmp function but works with
6040 __builtin_setjmp above. */
6041 case BUILT_IN_LONGJMP:
6042 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6044 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6045 VOIDmode, EXPAND_NORMAL);
6046 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
/* __builtin_longjmp only supports a second argument of 1.  */
6048 if (value != const1_rtx)
6050 error ("%<__builtin_longjmp%> second argument must be 1");
6054 expand_builtin_longjmp (buf_addr, value);
6059 case BUILT_IN_NONLOCAL_GOTO:
6060 target = expand_builtin_nonlocal_goto (exp);
6065 /* This updates the setjmp buffer that is its argument with the value
6066 of the current stack pointer. */
6067 case BUILT_IN_UPDATE_SETJMP_BUF:
6068 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6071 = expand_normal (CALL_EXPR_ARG (exp, 0));
6073 expand_builtin_update_setjmp_buf (buf_addr);
6079 expand_builtin_trap ();
6082 case BUILT_IN_UNREACHABLE:
6083 expand_builtin_unreachable ();
6086 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6087 case BUILT_IN_SIGNBITD32:
6088 case BUILT_IN_SIGNBITD64:
6089 case BUILT_IN_SIGNBITD128:
6090 target = expand_builtin_signbit (exp, target);
6095 /* Various hooks for the DWARF 2 __throw routine. */
6096 case BUILT_IN_UNWIND_INIT:
6097 expand_builtin_unwind_init ();
6099 case BUILT_IN_DWARF_CFA:
6100 return virtual_cfa_rtx;
6101 #ifdef DWARF2_UNWIND_INFO
6102 case BUILT_IN_DWARF_SP_COLUMN:
6103 return expand_builtin_dwarf_sp_column ();
6104 case BUILT_IN_INIT_DWARF_REG_SIZES:
6105 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6108 case BUILT_IN_FROB_RETURN_ADDR:
6109 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6110 case BUILT_IN_EXTRACT_RETURN_ADDR:
6111 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6112 case BUILT_IN_EH_RETURN:
6113 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6114 CALL_EXPR_ARG (exp, 1));
6116 #ifdef EH_RETURN_DATA_REGNO
6117 case BUILT_IN_EH_RETURN_DATA_REGNO:
6118 return expand_builtin_eh_return_data_regno (exp);
6120 case BUILT_IN_EXTEND_POINTER:
6121 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6122 case BUILT_IN_EH_POINTER:
6123 return expand_builtin_eh_pointer (exp);
6124 case BUILT_IN_EH_FILTER:
6125 return expand_builtin_eh_filter (exp);
6126 case BUILT_IN_EH_COPY_VALUES:
6127 return expand_builtin_eh_copy_values (exp);
/* --- varargs, expect, prefetch, profiling, trampolines --- */
6129 case BUILT_IN_VA_START:
6130 return expand_builtin_va_start (exp);
6131 case BUILT_IN_VA_END:
6132 return expand_builtin_va_end (exp);
6133 case BUILT_IN_VA_COPY:
6134 return expand_builtin_va_copy (exp);
6135 case BUILT_IN_EXPECT:
6136 return expand_builtin_expect (exp, target);
6137 case BUILT_IN_PREFETCH:
6138 expand_builtin_prefetch (exp);
6141 case BUILT_IN_PROFILE_FUNC_ENTER:
6142 return expand_builtin_profile_func (false);
6143 case BUILT_IN_PROFILE_FUNC_EXIT:
6144 return expand_builtin_profile_func (true);
6146 case BUILT_IN_INIT_TRAMPOLINE:
6147 return expand_builtin_init_trampoline (exp);
6148 case BUILT_IN_ADJUST_TRAMPOLINE:
6149 return expand_builtin_adjust_trampoline (exp);
6152 case BUILT_IN_EXECL:
6153 case BUILT_IN_EXECV:
6154 case BUILT_IN_EXECLP:
6155 case BUILT_IN_EXECLE:
6156 case BUILT_IN_EXECVP:
6157 case BUILT_IN_EXECVE:
6158 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
/* --- __sync_* atomic builtins.  Each group computes MODE from the
   _1/_2/_4/_8/_16 suffix, then delegates to a common expander.  The
   fetch-and-OP forms pass after=false, the OP-and-fetch forms true.  */
6163 case BUILT_IN_FETCH_AND_ADD_1:
6164 case BUILT_IN_FETCH_AND_ADD_2:
6165 case BUILT_IN_FETCH_AND_ADD_4:
6166 case BUILT_IN_FETCH_AND_ADD_8:
6167 case BUILT_IN_FETCH_AND_ADD_16:
6168 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6169 target = expand_builtin_sync_operation (mode, exp, PLUS,
6170 false, target, ignore);
6175 case BUILT_IN_FETCH_AND_SUB_1:
6176 case BUILT_IN_FETCH_AND_SUB_2:
6177 case BUILT_IN_FETCH_AND_SUB_4:
6178 case BUILT_IN_FETCH_AND_SUB_8:
6179 case BUILT_IN_FETCH_AND_SUB_16:
6180 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6181 target = expand_builtin_sync_operation (mode, exp, MINUS,
6182 false, target, ignore);
6187 case BUILT_IN_FETCH_AND_OR_1:
6188 case BUILT_IN_FETCH_AND_OR_2:
6189 case BUILT_IN_FETCH_AND_OR_4:
6190 case BUILT_IN_FETCH_AND_OR_8:
6191 case BUILT_IN_FETCH_AND_OR_16:
6192 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6193 target = expand_builtin_sync_operation (mode, exp, IOR,
6194 false, target, ignore);
6199 case BUILT_IN_FETCH_AND_AND_1:
6200 case BUILT_IN_FETCH_AND_AND_2:
6201 case BUILT_IN_FETCH_AND_AND_4:
6202 case BUILT_IN_FETCH_AND_AND_8:
6203 case BUILT_IN_FETCH_AND_AND_16:
6204 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6205 target = expand_builtin_sync_operation (mode, exp, AND,
6206 false, target, ignore);
6211 case BUILT_IN_FETCH_AND_XOR_1:
6212 case BUILT_IN_FETCH_AND_XOR_2:
6213 case BUILT_IN_FETCH_AND_XOR_4:
6214 case BUILT_IN_FETCH_AND_XOR_8:
6215 case BUILT_IN_FETCH_AND_XOR_16:
6216 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6217 target = expand_builtin_sync_operation (mode, exp, XOR,
6218 false, target, ignore);
6223 case BUILT_IN_FETCH_AND_NAND_1:
6224 case BUILT_IN_FETCH_AND_NAND_2:
6225 case BUILT_IN_FETCH_AND_NAND_4:
6226 case BUILT_IN_FETCH_AND_NAND_8:
6227 case BUILT_IN_FETCH_AND_NAND_16:
6228 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
/* NAND is encoded as the NOT rtx code for the sync expanders.  */
6229 target = expand_builtin_sync_operation (mode, exp, NOT,
6230 false, target, ignore);
6235 case BUILT_IN_ADD_AND_FETCH_1:
6236 case BUILT_IN_ADD_AND_FETCH_2:
6237 case BUILT_IN_ADD_AND_FETCH_4:
6238 case BUILT_IN_ADD_AND_FETCH_8:
6239 case BUILT_IN_ADD_AND_FETCH_16:
6240 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6241 target = expand_builtin_sync_operation (mode, exp, PLUS,
6242 true, target, ignore);
6247 case BUILT_IN_SUB_AND_FETCH_1:
6248 case BUILT_IN_SUB_AND_FETCH_2:
6249 case BUILT_IN_SUB_AND_FETCH_4:
6250 case BUILT_IN_SUB_AND_FETCH_8:
6251 case BUILT_IN_SUB_AND_FETCH_16:
6252 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6253 target = expand_builtin_sync_operation (mode, exp, MINUS,
6254 true, target, ignore);
6259 case BUILT_IN_OR_AND_FETCH_1:
6260 case BUILT_IN_OR_AND_FETCH_2:
6261 case BUILT_IN_OR_AND_FETCH_4:
6262 case BUILT_IN_OR_AND_FETCH_8:
6263 case BUILT_IN_OR_AND_FETCH_16:
6264 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6265 target = expand_builtin_sync_operation (mode, exp, IOR,
6266 true, target, ignore);
6271 case BUILT_IN_AND_AND_FETCH_1:
6272 case BUILT_IN_AND_AND_FETCH_2:
6273 case BUILT_IN_AND_AND_FETCH_4:
6274 case BUILT_IN_AND_AND_FETCH_8:
6275 case BUILT_IN_AND_AND_FETCH_16:
6276 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6277 target = expand_builtin_sync_operation (mode, exp, AND,
6278 true, target, ignore);
6283 case BUILT_IN_XOR_AND_FETCH_1:
6284 case BUILT_IN_XOR_AND_FETCH_2:
6285 case BUILT_IN_XOR_AND_FETCH_4:
6286 case BUILT_IN_XOR_AND_FETCH_8:
6287 case BUILT_IN_XOR_AND_FETCH_16:
6288 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6289 target = expand_builtin_sync_operation (mode, exp, XOR,
6290 true, target, ignore);
6295 case BUILT_IN_NAND_AND_FETCH_1:
6296 case BUILT_IN_NAND_AND_FETCH_2:
6297 case BUILT_IN_NAND_AND_FETCH_4:
6298 case BUILT_IN_NAND_AND_FETCH_8:
6299 case BUILT_IN_NAND_AND_FETCH_16:
6300 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6301 target = expand_builtin_sync_operation (mode, exp, NOT,
6302 true, target, ignore);
6307 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6308 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6309 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6310 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6311 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
/* The boolean form needs a register target of boolean mode before
   MODE is repurposed for the operand width below.  */
6312 if (mode == VOIDmode)
6313 mode = TYPE_MODE (boolean_type_node);
6314 if (!target || !register_operand (target, mode))
6315 target = gen_reg_rtx (mode);
6317 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6318 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6323 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6324 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6325 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6326 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6327 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6328 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6329 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6334 case BUILT_IN_LOCK_TEST_AND_SET_1:
6335 case BUILT_IN_LOCK_TEST_AND_SET_2:
6336 case BUILT_IN_LOCK_TEST_AND_SET_4:
6337 case BUILT_IN_LOCK_TEST_AND_SET_8:
6338 case BUILT_IN_LOCK_TEST_AND_SET_16:
6339 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6340 target = expand_builtin_lock_test_and_set (mode, exp, target);
6345 case BUILT_IN_LOCK_RELEASE_1:
6346 case BUILT_IN_LOCK_RELEASE_2:
6347 case BUILT_IN_LOCK_RELEASE_4:
6348 case BUILT_IN_LOCK_RELEASE_8:
6349 case BUILT_IN_LOCK_RELEASE_16:
6350 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6351 expand_builtin_lock_release (mode, exp);
6354 case BUILT_IN_SYNCHRONIZE:
6355 expand_builtin_synchronize ();
/* --- Object-size checking (_chk) builtins --- */
6358 case BUILT_IN_OBJECT_SIZE:
6359 return expand_builtin_object_size (exp);
6361 case BUILT_IN_MEMCPY_CHK:
6362 case BUILT_IN_MEMPCPY_CHK:
6363 case BUILT_IN_MEMMOVE_CHK:
6364 case BUILT_IN_MEMSET_CHK:
6365 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6370 case BUILT_IN_STRCPY_CHK:
6371 case BUILT_IN_STPCPY_CHK:
6372 case BUILT_IN_STRNCPY_CHK:
6373 case BUILT_IN_STRCAT_CHK:
6374 case BUILT_IN_STRNCAT_CHK:
6375 case BUILT_IN_SNPRINTF_CHK:
6376 case BUILT_IN_VSNPRINTF_CHK:
/* These only get a diagnostic here; the call itself is expanded
   normally below.  */
6377 maybe_emit_chk_warning (exp, fcode);
6380 case BUILT_IN_SPRINTF_CHK:
6381 case BUILT_IN_VSPRINTF_CHK:
6382 maybe_emit_sprintf_chk_warning (exp, fcode);
6386 maybe_emit_free_warning (exp);
6389 default: /* just do library call, if unknown builtin */
6393 /* The switch statement above can drop through to cause the function
6394 to be called normally. */
6395 return expand_call (exp, target, ignore);
6398 /* Determine whether a tree node represents a call to a built-in
6399 function. If the tree T is a call to a built-in function with
6400 the right number of arguments of the appropriate types, return
6401 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6402 Otherwise the return value is END_BUILTINS. */
6404 enum built_in_function
6405 builtin_mathfn_code (const_tree t)
6407 const_tree fndecl, arg, parmlist;
6408 const_tree argtype, parmtype;
6409 const_call_expr_arg_iterator iter;
/* Only direct calls (fn is an ADDR_EXPR of a decl) are candidates.  */
6411 if (TREE_CODE (t) != CALL_EXPR
6412 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6413 return END_BUILTINS;
6415 fndecl = get_callee_fndecl (t);
6416 if (fndecl == NULL_TREE
6417 || TREE_CODE (fndecl) != FUNCTION_DECL
6418 || ! DECL_BUILT_IN (fndecl)
6419 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6420 return END_BUILTINS;
/* Walk the declared parameter types and the actual arguments in
   lockstep, rejecting any arity or type-category mismatch.  */
6422 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6423 init_const_call_expr_arg_iterator (t, &iter);
6424 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6426 /* If a function doesn't take a variable number of arguments,
6427 the last element in the list will have type `void'. */
6428 parmtype = TREE_VALUE (parmlist);
6429 if (VOID_TYPE_P (parmtype))
/* Trailing void: any leftover actual arguments mean a mismatch.  */
6431 if (more_const_call_expr_args_p (&iter))
6432 return END_BUILTINS;
6433 return DECL_FUNCTION_CODE (fndecl);
6436 if (! more_const_call_expr_args_p (&iter))
6437 return END_BUILTINS;
6439 arg = next_const_call_expr_arg (&iter);
6440 argtype = TREE_TYPE (arg);
/* Arguments need only match the parameter's broad type category
   (float / complex-float / pointer / integral), not exactly.  */
6442 if (SCALAR_FLOAT_TYPE_P (parmtype))
6444 if (! SCALAR_FLOAT_TYPE_P (argtype))
6445 return END_BUILTINS;
6447 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6449 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6450 return END_BUILTINS;
6452 else if (POINTER_TYPE_P (parmtype))
6454 if (! POINTER_TYPE_P (argtype))
6455 return END_BUILTINS;
6457 else if (INTEGRAL_TYPE_P (parmtype))
6459 if (! INTEGRAL_TYPE_P (argtype))
6460 return END_BUILTINS;
6463 return END_BUILTINS;
6466 /* Variable-length argument list. */
6467 return DECL_FUNCTION_CODE (fndecl);
6470 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6471 evaluate to a constant. */
/* Returns integer_one_node when ARG is provably constant,
   integer_zero_node when it provably is not (or when we must give a
   definite answer, e.g. in an initializer); otherwise the elided tail
   of this function presumably defers the decision.  */
6474 fold_builtin_constant_p (tree arg)
6476 /* We return 1 for a numeric type that's known to be a constant
6477 value at compile-time or for an aggregate type that's a
6478 literal constant. */
6481 /* If we know this is a constant, emit the constant of one. */
6482 if (CONSTANT_CLASS_P (arg)
6483 || (TREE_CODE (arg) == CONSTRUCTOR
6484 && TREE_CONSTANT (arg)))
6485 return integer_one_node;
6486 if (TREE_CODE (arg) == ADDR_EXPR)
6488 tree op = TREE_OPERAND (arg, 0);
/* &"literal" and &"literal"[0] both count as constant.  */
6489 if (TREE_CODE (op) == STRING_CST
6490 || (TREE_CODE (op) == ARRAY_REF
6491 && integer_zerop (TREE_OPERAND (op, 1))
6492 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6493 return integer_one_node;
6496 /* If this expression has side effects, show we don't know it to be a
6497 constant. Likewise if it's a pointer or aggregate type since in
6498 those case we only want literals, since those are only optimized
6499 when generating RTL, not later.
6500 And finally, if we are compiling an initializer, not code, we
6501 need to return a definite result now; there's not going to be any
6502 more optimization done. */
6503 if (TREE_SIDE_EFFECTS (arg)
6504 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6505 || POINTER_TYPE_P (TREE_TYPE (arg))
6507 || folding_initializer)
6508 return integer_zero_node;
6513 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6514 return it as a truthvalue. */
/* Builds: (__builtin_expect ((T1) PRED, (T2) EXPECTED) != 0), where T1/T2
   are taken from BUILT_IN_EXPECT's own parameter types, so the result is
   a truthvalue usable in a boolean context.  */
6517 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6519 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6521 fn = built_in_decls[BUILT_IN_EXPECT];
6522 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6523 ret_type = TREE_TYPE (TREE_TYPE (fn));
6524 pred_type = TREE_VALUE (arg_types);
6525 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
/* Coerce both operands to the builtin's declared parameter types.  */
6527 pred = fold_convert_loc (loc, pred_type, pred);
6528 expected = fold_convert_loc (loc, expected_type, expected);
6529 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6531 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6532 build_int_cst (ret_type, 0));
6535 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6536 NULL_TREE if no simplification is possible. */
6539 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6542 enum tree_code code;
6544 /* If this is a builtin_expect within a builtin_expect keep the
6545 inner one. See through a comparison against a constant. It
6546 might have been added to create a thruthvalue. */
6548 if (COMPARISON_CLASS_P (inner)
6549 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6550 inner = TREE_OPERAND (inner, 0);
/* Nested __builtin_expect: the inner call already carries the hint.  */
6552 if (TREE_CODE (inner) == CALL_EXPR
6553 && (fndecl = get_callee_fndecl (inner))
6554 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6555 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6558 /* Distribute the expected value over short-circuiting operators.
6559 See through the cast from truthvalue_type_node to long. */
6561 while (TREE_CODE (inner) == NOP_EXPR
6562 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6563 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6564 inner = TREE_OPERAND (inner, 0);
6566 code = TREE_CODE (inner);
6567 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6569 tree op0 = TREE_OPERAND (inner, 0);
6570 tree op1 = TREE_OPERAND (inner, 1);
/* expect (a && b, v)  -->  expect (a, v) && expect (b, v), and
   likewise for ||, so each operand keeps the branch hint.  */
6572 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6573 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6574 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6576 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6579 /* If the argument isn't invariant then there's nothing else we can do. */
6580 if (!TREE_CONSTANT (arg0))
6583 /* If we expect that a comparison against the argument will fold to
6584 a constant return the constant. In practice, this means a true
6585 constant or the address of a non-weak symbol. */
6588 if (TREE_CODE (inner) == ADDR_EXPR)
6592 inner = TREE_OPERAND (inner, 0);
/* Strip COMPONENT_REF/ARRAY_REF wrappers to reach the base decl.  */
6594 while (TREE_CODE (inner) == COMPONENT_REF
6595 || TREE_CODE (inner) == ARRAY_REF);
/* Weak symbols may resolve to null at link time, so their address
   is not a usable compile-time constant.  */
6596 if ((TREE_CODE (inner) == VAR_DECL
6597 || TREE_CODE (inner) == FUNCTION_DECL)
6598 && DECL_WEAK (inner))
6602 /* Otherwise, ARG0 already has the proper type for the return value. */
6606 /* Fold a call to __builtin_classify_type with argument ARG. */
6609 fold_builtin_classify_type (tree arg)
/* (review: the guard line between these two returns — presumably
   `if (arg == 0)` — was elided from this listing; confirm upstream.)  */
6612 return build_int_cst (NULL_TREE, no_type_class);
6614 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6617 /* Fold a call to __builtin_strlen with argument ARG. */
/* Folds strlen of a constant string via c_strlen; result is converted
   to TYPE (the call's declared return type).  */
6620 fold_builtin_strlen (location_t loc, tree type, tree arg)
6622 if (!validate_arg (arg, POINTER_TYPE))
6626 tree len = c_strlen (arg, 0);
6629 return fold_convert_loc (loc, type, len);
6635 /* Fold a call to __builtin_inf or __builtin_huge_val. */
/* TYPE is the floating type to build the infinity in; WARN requests the
   C99-mandated diagnostic when the target format has no infinities.  */
6638 fold_builtin_inf (location_t loc, tree type, int warn)
6640 REAL_VALUE_TYPE real;
6642 /* __builtin_inff is intended to be usable to define INFINITY on all
6643 targets. If an infinity is not available, INFINITY expands "to a
6644 positive constant of type float that overflows at translation
6645 time", footnote "In this case, using INFINITY will violate the
6646 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6647 Thus we pedwarn to ensure this constraint violation is
6649 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6650 pedwarn (loc, 0, "target format does not support infinity");
/* (review: the real_inf initialization line was elided here.)  */
6653 return build_real (type, real);
6656 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
/* ARG is the tag string; QUIET selects quiet vs. signaling NaN.
   Returns NULL (via elided paths) when ARG is not a literal string or
   the target cannot represent the NaN.  */
6659 fold_builtin_nan (tree arg, tree type, int quiet)
6661 REAL_VALUE_TYPE real;
6664 if (!validate_arg (arg, POINTER_TYPE))
6666 str = c_getstr (arg);
/* real_nan parses STR as the NaN payload for TYPE's format.  */
6670 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6673 return build_real (type, real);
6676 /* Return true if the floating point expression T has an integer value.
6677 We also allow +Inf, -Inf and NaN to be considered integer values. */
/* Structural recursion over T; the case labels for each TREE_CODE were
   elided from this listing, only the bodies remain.  */
6680 integer_valued_real_p (tree t)
6682 switch (TREE_CODE (t))
6689 return integer_valued_real_p (TREE_OPERAND (t, 0));
6694 return integer_valued_real_p (TREE_OPERAND (t, 1));
/* Binary ops are integer-valued when both operands are.  */
6701 return integer_valued_real_p (TREE_OPERAND (t, 0))
6702 && integer_valued_real_p (TREE_OPERAND (t, 1));
/* Conditional: both arms must be integer-valued.  */
6705 return integer_valued_real_p (TREE_OPERAND (t, 1))
6706 && integer_valued_real_p (TREE_OPERAND (t, 2));
6709 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6713 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
/* Conversions from integer types are trivially integer-valued;
   float-to-float conversions recurse on the operand.  */
6714 if (TREE_CODE (type) == INTEGER_TYPE)
6716 if (TREE_CODE (type) == REAL_TYPE)
6717 return integer_valued_real_p (TREE_OPERAND (t, 0));
6722 switch (builtin_mathfn_code (t))
6724 CASE_FLT_FN (BUILT_IN_CEIL):
6725 CASE_FLT_FN (BUILT_IN_FLOOR):
6726 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6727 CASE_FLT_FN (BUILT_IN_RINT):
6728 CASE_FLT_FN (BUILT_IN_ROUND):
6729 CASE_FLT_FN (BUILT_IN_TRUNC):
6732 CASE_FLT_FN (BUILT_IN_FMIN):
6733 CASE_FLT_FN (BUILT_IN_FMAX):
/* min/max of two integer-valued operands is integer-valued.  */
6734 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6735 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6748 /* FNDECL is assumed to be a builtin where truncation can be propagated
6749 across (for instance floor((double)f) == (double)floorf (f).
6750 Do the transformation for a call with argument ARG. */
/* NOTE(review): elided excerpt — the returns attached to the idempotence
   and integer-valued tests, and the trailing NULL_TREE return, are not
   visible between these fragments.  */
6753 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6755 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6757 if (!validate_arg (arg, REAL_TYPE))
6760 /* Integer rounding functions are idempotent. */
6761 if (fcode == builtin_mathfn_code (arg))
6764 /* If argument is already integer valued, and we don't need to worry
6765 about setting errno, there's no need to perform rounding. */
6766 if (! flag_errno_math && integer_valued_real_p (arg))
/* Narrowing: if ARG is an extension of a narrower float type and the
   narrower builtin exists, call it and widen the result instead.  */
6771 tree arg0 = strip_float_extensions (arg);
6772 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6773 tree newtype = TREE_TYPE (arg0);
6776 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6777 && (decl = mathfn_built_in (newtype, fcode)))
6778 return fold_convert_loc (loc, ftype,
6779 build_call_expr_loc (loc, decl, 1,
6780 fold_convert_loc (loc,
6787 /* FNDECL is assumed to be builtin which can narrow the FP type of
6788 the argument, for instance lround((double)f) -> lroundf (f).
6789 Do the transformation for a call with argument ARG. */
/* NOTE(review): elided excerpt — switch braces, break statements and the
   final NULL_TREE return are missing from this view.  */
6792 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6794 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6796 if (!validate_arg (arg, REAL_TYPE))
6799 /* If argument is already integer valued, and we don't need to worry
6800 about setting errno, there's no need to perform rounding. */
6801 if (! flag_errno_math && integer_valued_real_p (arg))
6802 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6803 TREE_TYPE (TREE_TYPE (fndecl)), arg);
/* As in fold_trunc_transparent_mathfn: narrow to the float type ARG
   was extended from, when the narrower builtin exists.  */
6807 tree ftype = TREE_TYPE (arg);
6808 tree arg0 = strip_float_extensions (arg);
6809 tree newtype = TREE_TYPE (arg0);
6812 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6813 && (decl = mathfn_built_in (newtype, fcode)))
6814 return build_call_expr_loc (loc, decl, 1,
6815 fold_convert_loc (loc, newtype, arg0));
6818 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6819 sizeof (long long) == sizeof (long). */
6820 if (TYPE_PRECISION (long_long_integer_type_node)
6821 == TYPE_PRECISION (long_integer_type_node))
6823 tree newfn = NULL_TREE;
/* Map each ll* rounding builtin to its l* counterpart.  */
6826 CASE_FLT_FN (BUILT_IN_LLCEIL):
6827 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6830 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6831 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6834 CASE_FLT_FN (BUILT_IN_LLROUND):
6835 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6838 CASE_FLT_FN (BUILT_IN_LLRINT):
6839 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
/* Presumably guarded by "if (newfn)" on an elided line — confirm.  */
6848 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6849 return fold_convert_loc (loc,
6850 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6857 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6858 return type. Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided excerpt — RES declaration, some operands of the
   final PLUS_EXPR, and the closing NULL_TREE return are missing.  */
6861 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6865 if (!validate_arg (arg, COMPLEX_TYPE)
6866 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6869 /* Calculate the result when the argument is a constant. */
6870 if (TREE_CODE (arg) == COMPLEX_CST
6871 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6875 if (TREE_CODE (arg) == COMPLEX_EXPR)
6877 tree real = TREE_OPERAND (arg, 0);
6878 tree imag = TREE_OPERAND (arg, 1);
6880 /* If either part is zero, cabs is fabs of the other. */
6881 if (real_zerop (real))
6882 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6883 if (real_zerop (imag))
6884 return fold_build1_loc (loc, ABS_EXPR, type, real);
6886 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6887 if (flag_unsafe_math_optimizations
6888 && operand_equal_p (real, imag, OEP_PURE_SAME))
6890 const REAL_VALUE_TYPE sqrt2_trunc
6891 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6893 return fold_build2_loc (loc, MULT_EXPR, type,
6894 fold_build1_loc (loc, ABS_EXPR, type, real),
6895 build_real (type, sqrt2_trunc));
6899 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
6900 if (TREE_CODE (arg) == NEGATE_EXPR
6901 || TREE_CODE (arg) == CONJ_EXPR)
6902 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6904 /* Don't do this when optimizing for size. */
6905 if (flag_unsafe_math_optimizations
6906 && optimize && optimize_function_for_speed_p (cfun))
6908 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6910 if (sqrtfn != NULL_TREE)
6912 tree rpart, ipart, result;
/* Save ARG and its parts so each is evaluated only once below.  */
6914 arg = builtin_save_expr (arg);
6916 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6917 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6919 rpart = builtin_save_expr (rpart);
6920 ipart = builtin_save_expr (ipart);
/* cabs(z) expanded as sqrt(r*r + i*i); the squared operands sit on
   elided lines between these fragments.  */
6922 result = fold_build2_loc (loc, PLUS_EXPR, type,
6923 fold_build2_loc (loc, MULT_EXPR, type,
6925 fold_build2_loc (loc, MULT_EXPR, type,
6928 return build_call_expr_loc (loc, sqrtfn, 1, result);
6935 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
6936 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided excerpt — RES declaration, a powfn NULL check and
   the final NULL_TREE return are missing from this view.  */
6939 fold_builtin_sqrt (location_t loc, tree arg, tree type)
6942 enum built_in_function fcode;
6945 if (!validate_arg (arg, REAL_TYPE))
6948 /* Calculate the result when the argument is a constant. */
6949 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
6952 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
6953 fcode = builtin_mathfn_code (arg);
6954 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6956 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6957 arg = fold_build2_loc (loc, MULT_EXPR, type,
6958 CALL_EXPR_ARG (arg, 0),
6959 build_real (type, dconsthalf));
6960 return build_call_expr_loc (loc, expfn, 1, arg);
6963 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
6964 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6966 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6970 tree arg0 = CALL_EXPR_ARG (arg, 0);
6972 /* The inner root was either sqrt or cbrt. */
6973 /* This was a conditional expression but it triggered a bug
6975 REAL_VALUE_TYPE dconstroot;
6976 if (BUILTIN_SQRT_P (fcode))
6977 dconstroot = dconsthalf;
6979 dconstroot = dconst_third ();
6981 /* Adjust for the outer root. */
/* Halve the exponent by decrementing the binary exponent field.  */
6982 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6983 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6984 tree_root = build_real (type, dconstroot);
6985 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6989 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
6990 if (flag_unsafe_math_optimizations
6991 && (fcode == BUILT_IN_POW
6992 || fcode == BUILT_IN_POWF
6993 || fcode == BUILT_IN_POWL))
6995 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6996 tree arg0 = CALL_EXPR_ARG (arg, 0);
6997 tree arg1 = CALL_EXPR_ARG (arg, 1);
/* Force a nonnegative base so pow's domain is respected.  */
6999 if (!tree_expr_nonnegative_p (arg0))
7000 arg0 = build1 (ABS_EXPR, type, arg0);
7001 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7002 build_real (type, dconsthalf));
7003 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7009 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7010 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided excerpt — declarations (res, tree_root), powfn
   NULL checks and the final NULL_TREE return are not visible here.  */
7013 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7015 const enum built_in_function fcode = builtin_mathfn_code (arg);
7018 if (!validate_arg (arg, REAL_TYPE))
7021 /* Calculate the result when the argument is a constant. */
7022 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7025 if (flag_unsafe_math_optimizations)
7027 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7028 if (BUILTIN_EXPONENT_P (fcode))
7030 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7031 const REAL_VALUE_TYPE third_trunc =
7032 real_value_truncate (TYPE_MODE (type), dconst_third ());
7033 arg = fold_build2_loc (loc, MULT_EXPR, type,
7034 CALL_EXPR_ARG (arg, 0),
7035 build_real (type, third_trunc));
7036 return build_call_expr_loc (loc, expfn, 1, arg);
7039 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7040 if (BUILTIN_SQRT_P (fcode))
7042 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7046 tree arg0 = CALL_EXPR_ARG (arg, 0);
7048 REAL_VALUE_TYPE dconstroot = dconst_third ();
/* 1/3 with the exponent decremented gives 1/6.  */
7050 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7051 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7052 tree_root = build_real (type, dconstroot);
7053 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7057 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7058 if (BUILTIN_CBRT_P (fcode))
7060 tree arg0 = CALL_EXPR_ARG (arg, 0);
7061 if (tree_expr_nonnegative_p (arg0))
7063 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7068 REAL_VALUE_TYPE dconstroot;
/* (1/3) * (1/3) = 1/9 computed in the real-number layer.  */
7070 real_arithmetic (&dconstroot, MULT_EXPR,
7071 dconst_third_ptr (), dconst_third_ptr ());
7072 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7073 tree_root = build_real (type, dconstroot);
7074 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7079 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7080 if (fcode == BUILT_IN_POW
7081 || fcode == BUILT_IN_POWF
7082 || fcode == BUILT_IN_POWL)
7084 tree arg00 = CALL_EXPR_ARG (arg, 0);
7085 tree arg01 = CALL_EXPR_ARG (arg, 1);
7086 if (tree_expr_nonnegative_p (arg00))
7088 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7089 const REAL_VALUE_TYPE dconstroot
7090 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7091 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7092 build_real (type, dconstroot));
7093 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7100 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7101 TYPE is the type of the return value. Return NULL_TREE if no
7102 simplification can be made. */
/* NOTE(review): elided excerpt — RES/NARG declarations and the final
   NULL_TREE return are missing from this view.  */
7105 fold_builtin_cos (location_t loc,
7106 tree arg, tree type, tree fndecl)
7110 if (!validate_arg (arg, REAL_TYPE))
7113 /* Calculate the result when the argument is a constant. */
7114 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7117 /* Optimize cos(-x) into cos (x). */
7118 if ((narg = fold_strip_sign_ops (arg)))
7119 return build_call_expr_loc (loc, fndecl, 1, narg);
7124 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7125 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided excerpt — local declarations and the final
   NULL_TREE return are missing from this view.  */
7128 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7130 if (validate_arg (arg, REAL_TYPE))
7134 /* Calculate the result when the argument is a constant. */
7135 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7138 /* Optimize cosh(-x) into cosh (x). */
7139 if ((narg = fold_strip_sign_ops (arg)))
7140 return build_call_expr_loc (loc, fndecl, 1, narg);
7146 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7147 argument ARG. TYPE is the type of the return value. Return
7148 NULL_TREE if no simplification can be made. */
/* NOTE(review): elided excerpt — the HYPER parameter line, TMP's
   declaration and the final NULL_TREE return are missing here.  */
7151 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7154 if (validate_arg (arg, COMPLEX_TYPE)
7155 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7159 /* Calculate the result when the argument is a constant. */
7160 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7163 /* Optimize fn(-x) into fn(x). */
7164 if ((tmp = fold_strip_sign_ops (arg)))
7165 return build_call_expr_loc (loc, fndecl, 1, tmp);
7171 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7172 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided excerpt — RES declaration and the final
   NULL_TREE return are missing from this view.  */
7175 fold_builtin_tan (tree arg, tree type)
7177 enum built_in_function fcode;
7180 if (!validate_arg (arg, REAL_TYPE))
7183 /* Calculate the result when the argument is a constant. */
7184 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7187 /* Optimize tan(atan(x)) = x. */
/* Valid only under -funsafe-math-optimizations since atan's range
   restriction is dropped.  */
7188 fcode = builtin_mathfn_code (arg);
7189 if (flag_unsafe_math_optimizations
7190 && (fcode == BUILT_IN_ATAN
7191 || fcode == BUILT_IN_ATANF
7192 || fcode == BUILT_IN_ATANL))
7193 return CALL_EXPR_ARG (arg, 0);
7198 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7199 NULL_TREE if no simplification can be made. */
/* NOTE(review): elided excerpt — local declarations (type, res, fn,
   call) and the fn NULL check are missing from this view.  */
7202 fold_builtin_sincos (location_t loc,
7203 tree arg0, tree arg1, tree arg2)
7208 if (!validate_arg (arg0, REAL_TYPE)
7209 || !validate_arg (arg1, POINTER_TYPE)
7210 || !validate_arg (arg2, POINTER_TYPE))
7213 type = TREE_TYPE (arg0);
7215 /* Calculate the result when the argument is a constant. */
7216 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7219 /* Canonicalize sincos to cexpi. */
/* Presumably this bails out (an elided return) when the target lacks
   C99 functions — confirm against the full source.  */
7220 if (!TARGET_C99_FUNCTIONS)
7222 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7226 call = build_call_expr_loc (loc, fn, 1, arg0);
7227 call = builtin_save_expr (call);
/* Store imagpart(cexpi(x)) through ARG1 (sin) and realpart through
   ARG2 (cos), sequenced with a COMPOUND_EXPR.  */
7229 return build2 (COMPOUND_EXPR, void_type_node,
7230 build2 (MODIFY_EXPR, void_type_node,
7231 build_fold_indirect_ref_loc (loc, arg1),
7232 build1 (IMAGPART_EXPR, type, call)),
7233 build2 (MODIFY_EXPR, void_type_node,
7234 build_fold_indirect_ref_loc (loc, arg2),
7235 build1 (REALPART_EXPR, type, call)));
7238 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7239 NULL_TREE if no simplification can be made. */
/* NOTE(review): elided excerpt — declarations (res, rtype), several
   NULL checks, the realp decomposition for the second transform, and
   the final NULL_TREE return are missing from this view.  */
7242 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7245 tree realp, imagp, ifn;
7248 if (!validate_arg (arg0, COMPLEX_TYPE)
7249 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7252 /* Calculate the result when the argument is a constant. */
7253 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7256 rtype = TREE_TYPE (TREE_TYPE (arg0));
7258 /* In case we can figure out the real part of arg0 and it is constant zero
7260 if (!TARGET_C99_FUNCTIONS)
7262 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
/* cexp(0 + yi) == cexpi(y).  */
7266 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7267 && real_zerop (realp))
7269 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7270 return build_call_expr_loc (loc, ifn, 1, narg);
7273 /* In case we can easily decompose real and imaginary parts split cexp
7274 to exp (r) * cexpi (i). */
7275 if (flag_unsafe_math_optimizations
7278 tree rfn, rcall, icall;
7280 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7284 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
/* Save both calls so each is evaluated exactly once below.  */
7288 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7289 icall = builtin_save_expr (icall);
7290 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7291 rcall = builtin_save_expr (rcall);
7292 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7293 fold_build2_loc (loc, MULT_EXPR, rtype,
7295 fold_build1_loc (loc, REALPART_EXPR,
7297 fold_build2_loc (loc, MULT_EXPR, rtype,
7299 fold_build1_loc (loc, IMAGPART_EXPR,
7306 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7307 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided excerpt — intervening original lines are missing.  */
7310 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7312 if (!validate_arg (arg, REAL_TYPE))
7315 /* Optimize trunc of constant value. */
7316 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7318 REAL_VALUE_TYPE r, x;
7319 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7321 x = TREE_REAL_CST (arg);
7322 real_trunc (&r, TYPE_MODE (type), &x);
7323 return build_real (type, r);
/* Non-constant: fall back to the generic narrowing transform.  */
7326 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7329 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7330 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided excerpt — declarations of r/x and the truncfn
   NULL check are missing from this view.  */
7333 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7335 if (!validate_arg (arg, REAL_TYPE))
7338 /* Optimize floor of constant value. */
7339 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7343 x = TREE_REAL_CST (arg);
/* Skip NaN folding when errno math is on, so runtime semantics win.  */
7344 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7346 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7349 real_floor (&r, TYPE_MODE (type), &x);
7350 return build_real (type, r);
7354 /* Fold floor (x) where x is nonnegative to trunc (x). */
7355 if (tree_expr_nonnegative_p (arg))
7357 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7359 return build_call_expr_loc (loc, truncfn, 1, arg);
7362 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7365 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7366 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided excerpt — declarations of r/x are missing here.  */
7369 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7371 if (!validate_arg (arg, REAL_TYPE))
7374 /* Optimize ceil of constant value. */
7375 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7379 x = TREE_REAL_CST (arg);
/* As for floor: don't fold NaN when errno math is enabled.  */
7380 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7382 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7385 real_ceil (&r, TYPE_MODE (type), &x);
7386 return build_real (type, r);
7390 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7393 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7394 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided excerpt — declarations of r/x are missing here.  */
7397 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7399 if (!validate_arg (arg, REAL_TYPE))
7402 /* Optimize round of constant value. */
7403 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7407 x = TREE_REAL_CST (arg);
7408 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7410 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7413 real_round (&r, TYPE_MODE (type), &x);
7414 return build_real (type, r);
7418 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7421 /* Fold function call to builtin lround, lroundf or lroundl (or the
7422 corresponding long long versions) and other rounding functions. ARG
7423 is the argument to the call. Return NULL_TREE if no simplification
/* NOTE(review): elided excerpt — switch braces, break/default arms and
   REAL_VALUE_TYPE r's declaration are missing from this view.  */
7427 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7429 if (!validate_arg (arg, REAL_TYPE))
7432 /* Optimize lround of constant value. */
7433 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7435 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
/* Only finite values can be converted to an integer constant safely.  */
7437 if (real_isfinite (&x))
7439 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7440 tree ftype = TREE_TYPE (arg);
7441 unsigned HOST_WIDE_INT lo2;
7442 HOST_WIDE_INT hi, lo;
7445 switch (DECL_FUNCTION_CODE (fndecl))
7447 CASE_FLT_FN (BUILT_IN_LFLOOR):
7448 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7449 real_floor (&r, TYPE_MODE (ftype), &x);
7452 CASE_FLT_FN (BUILT_IN_LCEIL):
7453 CASE_FLT_FN (BUILT_IN_LLCEIL):
7454 real_ceil (&r, TYPE_MODE (ftype), &x);
7457 CASE_FLT_FN (BUILT_IN_LROUND):
7458 CASE_FLT_FN (BUILT_IN_LLROUND):
7459 real_round (&r, TYPE_MODE (ftype), &x);
/* Build the integer constant only when it fits the result type.
   NOTE(review): the "!" on fit_double_type suggests it returns zero
   on success — confirm against its definition.  */
7466 REAL_VALUE_TO_INT (&lo, &hi, r);
7467 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7468 return build_int_cst_wide (itype, lo2, hi);
7472 switch (DECL_FUNCTION_CODE (fndecl))
7474 CASE_FLT_FN (BUILT_IN_LFLOOR):
7475 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7476 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7477 if (tree_expr_nonnegative_p (arg))
7478 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7479 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7484 return fold_fixed_mathfn (loc, fndecl, arg);
7487 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7488 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7489 the argument to the call. Return NULL_TREE if no simplification can
/* NOTE(review): elided excerpt — branch conditions (e.g. the lo/hi
   nonzero tests), result initializations, break statements and the
   final NULL_TREE return are missing from this view.  */
7493 fold_builtin_bitop (tree fndecl, tree arg)
7495 if (!validate_arg (arg, INTEGER_TYPE))
7498 /* Optimize for constant argument. */
7499 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7501 HOST_WIDE_INT hi, width, result;
7502 unsigned HOST_WIDE_INT lo;
7505 type = TREE_TYPE (arg);
7506 width = TYPE_PRECISION (type);
7507 lo = TREE_INT_CST_LOW (arg);
7509 /* Clear all the bits that are beyond the type's precision. */
7510 if (width > HOST_BITS_PER_WIDE_INT)
7512 hi = TREE_INT_CST_HIGH (arg);
7513 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7514 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7519 if (width < HOST_BITS_PER_WIDE_INT)
7520 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7523 switch (DECL_FUNCTION_CODE (fndecl))
/* ffs: index of least-significant set bit, 1-based; lo&-lo isolates it.  */
7525 CASE_INT_FN (BUILT_IN_FFS):
7527 result = exact_log2 (lo & -lo) + 1;
7529 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
/* clz: leading zeros, computed from the highest set bit's position.  */
7534 CASE_INT_FN (BUILT_IN_CLZ):
7536 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7538 result = width - floor_log2 (lo) - 1;
7539 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7543 CASE_INT_FN (BUILT_IN_CTZ):
7545 result = exact_log2 (lo & -lo);
7547 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7548 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
/* popcount/parity: clear one set bit per iteration via x &= x-1.  */
7552 CASE_INT_FN (BUILT_IN_POPCOUNT):
7555 result++, lo &= lo - 1;
7557 result++, hi &= hi - 1;
7560 CASE_INT_FN (BUILT_IN_PARITY):
7563 result++, lo &= lo - 1;
7565 result++, hi &= hi - 1;
7573 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7579 /* Fold function call to builtin_bswap and the long and long long
7580 variants. Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided excerpt — the loop-variable declaration, the
   r_lo assignment arm of the d < HOST_BITS test, the default switch
   arm and the final NULL_TREE return are missing from this view.  */
7582 fold_builtin_bswap (tree fndecl, tree arg)
7584 if (! validate_arg (arg, INTEGER_TYPE))
7587 /* Optimize constant value. */
7588 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7590 HOST_WIDE_INT hi, width, r_hi = 0;
7591 unsigned HOST_WIDE_INT lo, r_lo = 0;
7594 type = TREE_TYPE (arg);
7595 width = TYPE_PRECISION (type);
7596 lo = TREE_INT_CST_LOW (arg);
7597 hi = TREE_INT_CST_HIGH (arg);
7599 switch (DECL_FUNCTION_CODE (fndecl))
7601 case BUILT_IN_BSWAP32:
7602 case BUILT_IN_BSWAP64:
/* Mirror each byte: byte at bit offset S moves to offset WIDTH-S-8.  */
7606 for (s = 0; s < width; s += 8)
7608 int d = width - s - 8;
7609 unsigned HOST_WIDE_INT byte;
7611 if (s < HOST_BITS_PER_WIDE_INT)
7612 byte = (lo >> s) & 0xff;
7614 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7616 if (d < HOST_BITS_PER_WIDE_INT)
7619 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7629 if (width < HOST_BITS_PER_WIDE_INT)
7630 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7632 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7638 /* A subroutine of fold_builtin to fold the various logarithmic
7639 functions. Return NULL_TREE if no simplification can me made.
7640 FUNC is the corresponding MPFR logarithm function. */
/* NOTE(review): elided excerpt — the res declaration, inner switch
   braces, break statements, dconst_e/third truncation operands and the
   final NULL_TREE return are missing from this view.  */
7643 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7644 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7646 if (validate_arg (arg, REAL_TYPE))
7648 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7650 const enum built_in_function fcode = builtin_mathfn_code (arg);
7652 /* Calculate the result when the argument is a constant. */
7653 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7656 /* Special case, optimize logN(expN(x)) = x. */
/* FUNC identifies which log this is (mpfr_log/log2/log10), so the
   matching exp family is selected per base.  */
7657 if (flag_unsafe_math_optimizations
7658 && ((func == mpfr_log
7659 && (fcode == BUILT_IN_EXP
7660 || fcode == BUILT_IN_EXPF
7661 || fcode == BUILT_IN_EXPL))
7662 || (func == mpfr_log2
7663 && (fcode == BUILT_IN_EXP2
7664 || fcode == BUILT_IN_EXP2F
7665 || fcode == BUILT_IN_EXP2L))
7666 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7667 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7669 /* Optimize logN(func()) for various exponential functions. We
7670 want to determine the value "x" and the power "exponent" in
7671 order to transform logN(x**exponent) into exponent*logN(x). */
7672 if (flag_unsafe_math_optimizations)
7674 tree exponent = 0, x = 0;
7678 CASE_FLT_FN (BUILT_IN_EXP):
7679 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7680 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7682 exponent = CALL_EXPR_ARG (arg, 0);
7684 CASE_FLT_FN (BUILT_IN_EXP2):
7685 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7686 x = build_real (type, dconst2);
7687 exponent = CALL_EXPR_ARG (arg, 0);
7689 CASE_FLT_FN (BUILT_IN_EXP10):
7690 CASE_FLT_FN (BUILT_IN_POW10):
7691 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7693 REAL_VALUE_TYPE dconst10;
7694 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7695 x = build_real (type, dconst10);
7697 exponent = CALL_EXPR_ARG (arg, 0);
7699 CASE_FLT_FN (BUILT_IN_SQRT):
7700 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7701 x = CALL_EXPR_ARG (arg, 0);
7702 exponent = build_real (type, dconsthalf);
7704 CASE_FLT_FN (BUILT_IN_CBRT):
7705 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7706 x = CALL_EXPR_ARG (arg, 0);
7707 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7710 CASE_FLT_FN (BUILT_IN_POW):
7711 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7712 x = CALL_EXPR_ARG (arg, 0);
7713 exponent = CALL_EXPR_ARG (arg, 1);
7719 /* Now perform the optimization. */
/* Presumably guarded by "if (x)" on an elided line — confirm.  */
7722 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7723 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7731 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7732 NULL_TREE if no simplification can be made. */
/* NOTE(review): elided excerpt — the condition guarding the rebuild
   with stripped args and the final NULL_TREE return are missing.  */
7735 fold_builtin_hypot (location_t loc, tree fndecl,
7736 tree arg0, tree arg1, tree type)
7738 tree res, narg0, narg1;
7740 if (!validate_arg (arg0, REAL_TYPE)
7741 || !validate_arg (arg1, REAL_TYPE))
7744 /* Calculate the result when the argument is a constant. */
7745 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7748 /* If either argument to hypot has a negate or abs, strip that off.
7749 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7750 narg0 = fold_strip_sign_ops (arg0);
7751 narg1 = fold_strip_sign_ops (arg1);
7754 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7755 narg1 ? narg1 : arg1);
7758 /* If either argument is zero, hypot is fabs of the other. */
7759 if (real_zerop (arg0))
7760 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7761 else if (real_zerop (arg1))
7762 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7764 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7765 if (flag_unsafe_math_optimizations
7766 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7768 const REAL_VALUE_TYPE sqrt2_trunc
7769 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7770 return fold_build2_loc (loc, MULT_EXPR, type,
7771 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7772 build_real (type, sqrt2_trunc));
7779 /* Fold a builtin function call to pow, powf, or powl. Return
7780 NULL_TREE if no simplification can be made. */
/* NOTE(review): elided excerpt — declarations (res, c, n, x, inexact),
   several closing braces and the final NULL_TREE return are missing
   from this view.  */
7782 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7786 if (!validate_arg (arg0, REAL_TYPE)
7787 || !validate_arg (arg1, REAL_TYPE))
7790 /* Calculate the result when the argument is a constant. */
7791 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7794 /* Optimize pow(1.0,y) = 1.0. */
7795 if (real_onep (arg0))
7796 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7798 if (TREE_CODE (arg1) == REAL_CST
7799 && !TREE_OVERFLOW (arg1))
7801 REAL_VALUE_TYPE cint;
7805 c = TREE_REAL_CST (arg1);
7807 /* Optimize pow(x,0.0) = 1.0. */
7808 if (REAL_VALUES_EQUAL (c, dconst0))
7809 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7812 /* Optimize pow(x,1.0) = x. */
7813 if (REAL_VALUES_EQUAL (c, dconst1))
7816 /* Optimize pow(x,-1.0) = 1.0/x. */
7817 if (REAL_VALUES_EQUAL (c, dconstm1))
7818 return fold_build2_loc (loc, RDIV_EXPR, type,
7819 build_real (type, dconst1), arg0);
7821 /* Optimize pow(x,0.5) = sqrt(x). */
7822 if (flag_unsafe_math_optimizations
7823 && REAL_VALUES_EQUAL (c, dconsthalf))
7825 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7827 if (sqrtfn != NULL_TREE)
7828 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7831 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7832 if (flag_unsafe_math_optimizations)
7834 const REAL_VALUE_TYPE dconstroot
7835 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7837 if (REAL_VALUES_EQUAL (c, dconstroot))
7839 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7840 if (cbrtfn != NULL_TREE)
7841 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7845 /* Check for an integer exponent. */
7846 n = real_to_integer (&c);
7847 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
/* real_identical: the exponent round-trips through an integer.  */
7848 if (real_identical (&c, &cint))
7850 /* Attempt to evaluate pow at compile-time, unless this should
7851 raise an exception. */
7852 if (TREE_CODE (arg0) == REAL_CST
7853 && !TREE_OVERFLOW (arg0)
7855 || (!flag_trapping_math && !flag_errno_math)
7856 && !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7861 x = TREE_REAL_CST (arg0);
7862 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7863 if (flag_unsafe_math_optimizations || !inexact)
7864 return build_real (type, x);
7867 /* Strip sign ops from even integer powers. */
7868 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7870 tree narg0 = fold_strip_sign_ops (arg0);
7872 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
7877 if (flag_unsafe_math_optimizations)
7879 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7881 /* Optimize pow(expN(x),y) = expN(x*y). */
7882 if (BUILTIN_EXPONENT_P (fcode))
7884 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7885 tree arg = CALL_EXPR_ARG (arg0, 0);
7886 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7887 return build_call_expr_loc (loc, expfn, 1, arg);
7890 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7891 if (BUILTIN_SQRT_P (fcode))
7893 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7894 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7895 build_real (type, dconsthalf));
7896 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7899 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7900 if (BUILTIN_CBRT_P (fcode))
7902 tree arg = CALL_EXPR_ARG (arg0, 0);
7903 if (tree_expr_nonnegative_p (arg))
7905 const REAL_VALUE_TYPE dconstroot
7906 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7907 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7908 build_real (type, dconstroot));
7909 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7913 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7914 if (fcode == BUILT_IN_POW
7915 || fcode == BUILT_IN_POWF
7916 || fcode == BUILT_IN_POWL)
7918 tree arg00 = CALL_EXPR_ARG (arg0, 0);
7919 if (tree_expr_nonnegative_p (arg00))
7921 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7922 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7923 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7931 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
7932 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided excerpt — the c==0/1/-1 comparisons guarding the
   last three folds and the final NULL_TREE return are missing here.  */
7934 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7935 tree arg0, tree arg1, tree type)
7937 if (!validate_arg (arg0, REAL_TYPE)
7938 || !validate_arg (arg1, INTEGER_TYPE))
7941 /* Optimize pow(1.0,y) = 1.0. */
7942 if (real_onep (arg0))
7943 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7945 if (host_integerp (arg1, 0))
7947 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7949 /* Evaluate powi at compile-time. */
7950 if (TREE_CODE (arg0) == REAL_CST
7951 && !TREE_OVERFLOW (arg0))
7954 x = TREE_REAL_CST (arg0);
7955 real_powi (&x, TYPE_MODE (type), &x, c);
7956 return build_real (type, x);
7959 /* Optimize pow(x,0) = 1.0. */
7961 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7964 /* Optimize pow(x,1) = x. */
7968 /* Optimize pow(x,-1) = 1.0/x. */
7970 return fold_build2_loc (loc, RDIV_EXPR, type,
7971 build_real (type, dconst1), arg0);
7977 /* A subroutine of fold_builtin to fold the various exponent
7978 functions. Return NULL_TREE if no simplification can be made.
7979 FUNC is the corresponding MPFR exponent function. */
/* NOTE(review): elided listing -- braces and the final return are hidden in
   the numbering gaps.  */
7982 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
7983 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7985 if (validate_arg (arg, REAL_TYPE))
7987 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7990 /* Calculate the result when the argument is a constant. */
7991 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
7994 /* Optimize expN(logN(x)) = x. */
/* Only valid under -funsafe-math-optimizations: it ignores the domain
   error logN would raise for non-positive x.  */
7995 if (flag_unsafe_math_optimizations)
7997 const enum built_in_function fcode = builtin_mathfn_code (arg);
/* Match the MPFR evaluator passed in FUNC against the corresponding
   base-e / base-2 / base-10 log builtin in the inner call.  */
7999 if ((func == mpfr_exp
8000 && (fcode == BUILT_IN_LOG
8001 || fcode == BUILT_IN_LOGF
8002 || fcode == BUILT_IN_LOGL))
8003 || (func == mpfr_exp2
8004 && (fcode == BUILT_IN_LOG2
8005 || fcode == BUILT_IN_LOG2F
8006 || fcode == BUILT_IN_LOG2L))
8007 || (func == mpfr_exp10
8008 && (fcode == BUILT_IN_LOG10
8009 || fcode == BUILT_IN_LOG10F
8010 || fcode == BUILT_IN_LOG10L)))
8011 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8018 /* Return true if VAR is a VAR_DECL or a component thereof. */
8021 var_decl_component_p (tree var)
/* Strip COMPONENT_REF/ARRAY_REF-style wrappers down to the base object,
   then test whether that base is an SSA variable or declaration.  */
8024 while (handled_component_p (inner))
8025 inner = TREE_OPERAND (inner, 0);
8026 return SSA_VAR_P (inner);
8029 /* Fold function call to builtin memset. Return
8030 NULL_TREE if no simplification can be made. */
/* NOTE(review): elided listing -- several lines (braces, early returns,
   the VAR initialization from DEST) are hidden in the numbering gaps.  */
8033 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8034 tree type, bool ignore)
8036 tree var, ret, etype;
8037 unsigned HOST_WIDE_INT length, cval;
8039 if (! validate_arg (dest, POINTER_TYPE)
8040 || ! validate_arg (c, INTEGER_TYPE)
8041 || ! validate_arg (len, INTEGER_TYPE))
8044 if (! host_integerp (len, 1))
8047 /* If the LEN parameter is zero, return DEST. */
8048 if (integer_zerop (len))
8049 return omit_one_operand_loc (loc, type, dest, c)
8051 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
/* Only fold when DEST is the address of a plain, non-volatile object.  */
8056 if (TREE_CODE (var) != ADDR_EXPR)
8059 var = TREE_OPERAND (var, 0);
8060 if (TREE_THIS_VOLATILE (var))
8063 etype = TREE_TYPE (var);
8064 if (TREE_CODE (etype) == ARRAY_TYPE)
8065 etype = TREE_TYPE (etype);
8067 if (!INTEGRAL_TYPE_P (etype)
8068 && !POINTER_TYPE_P (etype))
8071 if (! var_decl_component_p (var))
/* The store must cover the element exactly and be sufficiently aligned.  */
8074 length = tree_low_cst (len, 1);
8075 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8076 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8080 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8083 if (integer_zerop (c))
8087 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
/* Replicate the fill byte across the value; the visible shift is split as
   31+1, presumably to avoid a shift equal to the type width -- confirm
   against the elided replication steps above it.  */
8090 cval = tree_low_cst (c, 1);
8094 cval |= (cval << 31) << 1;
/* Build "*(etype *) dest = cval" as a single scalar store.  */
8097 ret = build_int_cst_type (etype, cval);
8098 var = build_fold_indirect_ref_loc (loc,
8099 fold_convert_loc (loc,
8100 build_pointer_type (etype),
8102 ret = build2 (MODIFY_EXPR, etype, var, ret);
8106 return omit_one_operand_loc (loc, type, dest, ret);
8109 /* Fold function call to builtin memset. Return
8110 NULL_TREE if no simplification can be made. */
/* NOTE(review): header comment says "memset" but this folds bzero --
   likely a copy/paste remnant in the original.  */
8113 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8115 if (! validate_arg (dest, POINTER_TYPE)
8116 || ! validate_arg (size, INTEGER_TYPE))
8122 /* New argument list transforming bzero(ptr x, int y) to
8123 memset(ptr x, int 0, size_t y). This is done this way
8124 so that if it isn't expanded inline, we fallback to
8125 calling bzero instead of memset. */
8127 return fold_builtin_memset (loc, dest, integer_zero_node,
8128 fold_convert_loc (loc, sizetype, size),
8129 void_type_node, ignore);
8132 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8133 NULL_TREE if no simplification can be made.
8134 If ENDP is 0, return DEST (like memcpy).
8135 If ENDP is 1, return DEST+LEN (like mempcpy).
8136 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8137 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
/* NOTE(review): elided listing -- many lines (braces, early returns, some
   assignments) are hidden in the numbering gaps throughout this function.  */
8141 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8142 tree len, tree type, bool ignore, int endp)
8144 tree destvar, srcvar, expr;
8146 if (! validate_arg (dest, POINTER_TYPE)
8147 || ! validate_arg (src, POINTER_TYPE)
8148 || ! validate_arg (len, INTEGER_TYPE))
8151 /* If the LEN parameter is zero, return DEST. */
8152 if (integer_zerop (len))
8153 return omit_one_operand_loc (loc, type, dest, src);
8155 /* If SRC and DEST are the same (and not volatile), return
8156 DEST{,+LEN,+LEN-1}. */
8157 if (operand_equal_p (src, dest, 0))
/* This branch (ENDP == 3, i.e. memmove) tries to prove non-overlap so the
   call can be rewritten as memcpy.  */
8161 tree srctype, desttype;
8162 int src_align, dest_align;
8166 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8167 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8169 /* Both DEST and SRC must be pointer types.
8170 ??? This is what old code did. Is the testing for pointer types
8173 If either SRC is readonly or length is 1, we can use memcpy. */
8174 if (!dest_align || !src_align)
8176 if (readonly_data_expr (src)
8177 || (host_integerp (len, 1)
8178 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8179 >= tree_low_cst (len, 1))))
8181 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8184 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8187 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8188 srcvar = build_fold_indirect_ref_loc (loc, src);
8189 destvar = build_fold_indirect_ref_loc (loc, dest);
8191 && !TREE_THIS_VOLATILE (srcvar)
8193 && !TREE_THIS_VOLATILE (destvar))
8195 tree src_base, dest_base, fn;
8196 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8197 HOST_WIDE_INT size = -1;
8198 HOST_WIDE_INT maxsize = -1;
/* Decompose both references into base object + bit offset + extent.  */
8201 if (handled_component_p (src_base))
8202 src_base = get_ref_base_and_extent (src_base, &src_offset,
8204 dest_base = destvar;
8205 if (handled_component_p (dest_base))
8206 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8208 if (host_integerp (len, 1))
8210 maxsize = tree_low_cst (len, 1);
/* Guard the BITS_PER_UNIT multiply below against HOST_WIDE_INT overflow.  */
8212 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8215 maxsize *= BITS_PER_UNIT;
/* Same decl base: ranges must not overlap.  Distinct pointer bases: the
   pointers themselves must be provably equal or the ranges disjoint.  */
8219 if (SSA_VAR_P (src_base)
8220 && SSA_VAR_P (dest_base))
8222 if (operand_equal_p (src_base, dest_base, 0)
8223 && ranges_overlap_p (src_offset, maxsize,
8224 dest_offset, maxsize))
8227 else if (TREE_CODE (src_base) == INDIRECT_REF
8228 && TREE_CODE (dest_base) == INDIRECT_REF)
8230 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8231 TREE_OPERAND (dest_base, 0), 0)
8232 || ranges_overlap_p (src_offset, maxsize,
8233 dest_offset, maxsize))
8239 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8242 return build_call_expr_loc (loc, fn, 3, dest, src, len);
/* From here on: attempt to turn the copy into a single scalar/aggregate
   assignment when LEN matches the pointed-to type's size.  */
8247 if (!host_integerp (len, 0))
8250 This logic lose for arguments like (type *)malloc (sizeof (type)),
8251 since we strip the casts of up to VOID return value from malloc.
8252 Perhaps we ought to inherit type from non-VOID argument here? */
8255 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8256 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8258 tree tem = TREE_OPERAND (src, 0);
8260 if (tem != TREE_OPERAND (src, 0))
8261 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8263 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8265 tree tem = TREE_OPERAND (dest, 0);
8267 if (tem != TREE_OPERAND (dest, 0))
8268 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
/* For array-typed operands whose whole size differs from LEN, retype the
   pointer to the element type so the size test below can still match.  */
8270 srctype = TREE_TYPE (TREE_TYPE (src));
8272 && TREE_CODE (srctype) == ARRAY_TYPE
8273 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8275 srctype = TREE_TYPE (srctype);
8277 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8279 desttype = TREE_TYPE (TREE_TYPE (dest));
8281 && TREE_CODE (desttype) == ARRAY_TYPE
8282 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8284 desttype = TREE_TYPE (desttype);
8286 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8288 if (!srctype || !desttype
8289 || !TYPE_SIZE_UNIT (srctype)
8290 || !TYPE_SIZE_UNIT (desttype)
8291 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8292 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8293 || TYPE_VOLATILE (srctype)
8294 || TYPE_VOLATILE (desttype))
8297 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8298 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8299 if (dest_align < (int) TYPE_ALIGN (desttype)
8300 || src_align < (int) TYPE_ALIGN (srctype))
/* DEST is used again for the ENDP != 0 return value; avoid re-expansion.  */
8304 dest = builtin_save_expr (dest);
8307 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8309 srcvar = build_fold_indirect_ref_loc (loc, src);
8310 if (TREE_THIS_VOLATILE (srcvar))
8312 else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
8314 /* With memcpy, it is possible to bypass aliasing rules, so without
8315 this check i.e. execute/20060930-2.c would be misoptimized,
8316 because it use conflicting alias set to hold argument for the
8317 memcpy call. This check is probably unnecessary with
8318 -fno-strict-aliasing. Similarly for destvar. See also
8320 else if (!var_decl_component_p (srcvar))
8324 destvar = NULL_TREE;
8325 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8327 destvar = build_fold_indirect_ref_loc (loc, dest);
8328 if (TREE_THIS_VOLATILE (destvar))
8330 else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
8331 destvar = NULL_TREE;
8332 else if (!var_decl_component_p (destvar))
8333 destvar = NULL_TREE;
8336 if (srcvar == NULL_TREE && destvar == NULL_TREE)
/* If only one side qualified, synthesize the other side's access type
   from it, forcing alignment/packing when the pointer is under-aligned.  */
8339 if (srcvar == NULL_TREE)
8342 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8345 srctype = build_qualified_type (desttype, 0);
8346 if (src_align < (int) TYPE_ALIGN (srctype))
8348 if (AGGREGATE_TYPE_P (srctype)
8349 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8352 srctype = build_variant_type_copy (srctype);
8353 TYPE_ALIGN (srctype) = src_align;
8354 TYPE_USER_ALIGN (srctype) = 1;
8355 TYPE_PACKED (srctype) = 1;
8357 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8358 src = fold_convert_loc (loc, srcptype, src);
8359 srcvar = build_fold_indirect_ref_loc (loc, src);
8361 else if (destvar == NULL_TREE)
8364 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8367 desttype = build_qualified_type (srctype, 0);
8368 if (dest_align < (int) TYPE_ALIGN (desttype))
8370 if (AGGREGATE_TYPE_P (desttype)
8371 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8374 desttype = build_variant_type_copy (desttype);
8375 TYPE_ALIGN (desttype) = dest_align;
8376 TYPE_USER_ALIGN (desttype) = 1;
8377 TYPE_PACKED (desttype) = 1;
8379 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8380 dest = fold_convert_loc (loc, destptype, dest);
8381 destvar = build_fold_indirect_ref_loc (loc, dest);
/* Pick the cheapest conversion between the two access types: none,
   scalar fold_convert, or a VIEW_CONVERT_EXPR for everything else.  */
8384 if (srctype == desttype
8385 || (gimple_in_ssa_p (cfun)
8386 && useless_type_conversion_p (desttype, srctype)))
8388 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8389 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8390 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8391 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8392 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
8394 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8395 TREE_TYPE (destvar), srcvar);
8396 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
/* Shape the return value per ENDP (see the header comment above).  */
8402 if (endp == 0 || endp == 3)
8403 return omit_one_operand_loc (loc, type, dest, expr);
8409 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8412 len = fold_convert_loc (loc, sizetype, len);
8413 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8414 dest = fold_convert_loc (loc, type, dest);
8416 dest = omit_one_operand_loc (loc, type, dest, expr);
8420 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8421 If LEN is not NULL, it represents the length of the string to be
8422 copied. Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided listing -- braces/early returns hidden in gaps.  */
8425 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8429 if (!validate_arg (dest, POINTER_TYPE)
8430 || !validate_arg (src, POINTER_TYPE))
8433 /* If SRC and DEST are the same (and not volatile), return DEST. */
8434 if (operand_equal_p (src, dest, 0))
8435 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
/* The memcpy transformation below can grow code; skip it under -Os.  */
8437 if (optimize_function_for_size_p (cfun))
8440 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
/* Need a constant, side-effect-free string length for SRC.  */
8446 len = c_strlen (src, 1);
8447 if (! len || TREE_SIDE_EFFECTS (len))
/* +1 so the terminating NUL is copied too.  */
8451 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8452 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8453 build_call_expr_loc (loc, fn, 3, dest, src, len));
8456 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8457 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided listing -- braces and the final return are hidden
   in the numbering gaps.  */
8460 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8462 tree fn, len, lenp1, call, type;
8464 if (!validate_arg (dest, POINTER_TYPE)
8465 || !validate_arg (src, POINTER_TYPE))
8468 len = c_strlen (src, 1);
8470 || TREE_CODE (len) != INTEGER_CST)
8473 if (optimize_function_for_size_p (cfun)
8474 /* If length is zero it's small enough. */
8475 && !integer_zerop (len))
8478 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
/* Copy LEN+1 bytes (including the NUL), then return DEST + LEN, which is
   stpcpy's contract: a pointer to the terminating NUL.  */
8482 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8483 /* We use dest twice in building our expression. Save it from
8484 multiple expansions. */
8485 dest = builtin_save_expr (dest);
8486 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8488 type = TREE_TYPE (TREE_TYPE (fndecl));
8489 len = fold_convert_loc (loc, sizetype, len);
8490 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8491 dest = fold_convert_loc (loc, type, dest);
8492 dest = omit_one_operand_loc (loc, type, dest, call);
8496 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8497 If SLEN is not NULL, it represents the length of the source string.
8498 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided listing -- some early returns are hidden in gaps.  */
8501 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8502 tree src, tree len, tree slen)
8506 if (!validate_arg (dest, POINTER_TYPE)
8507 || !validate_arg (src, POINTER_TYPE)
8508 || !validate_arg (len, INTEGER_TYPE))
8511 /* If the LEN parameter is zero, return DEST. */
8512 if (integer_zerop (len))
8513 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8515 /* We can't compare slen with len as constants below if len is not a
8517 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8521 slen = c_strlen (src, 1);
8523 /* Now, we must be passed a constant src ptr parameter. */
8524 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
/* SLEN+1 accounts for the terminating NUL in the source.  */
8527 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8529 /* We do not support simplification of this case, though we do
8530 support it when expanding trees into RTL. */
8531 /* FIXME: generate a call to __builtin_memset. */
/* SLEN < LEN means strncpy would also zero-pad; not handled here.  */
8532 if (tree_int_cst_lt (slen, len))
8535 /* OK transform into builtin memcpy. */
8536 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8539 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8540 build_call_expr_loc (loc, fn, 3, dest, src, len));
8543 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8544 arguments to the call, and TYPE is its return type.
8545 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided listing -- braces/early returns hidden in gaps.  */
8548 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8550 if (!validate_arg (arg1, POINTER_TYPE)
8551 || !validate_arg (arg2, INTEGER_TYPE)
8552 || !validate_arg (len, INTEGER_TYPE))
8558 if (TREE_CODE (arg2) != INTEGER_CST
8559 || !host_integerp (len, 1))
/* Only fold when the haystack is a string literal and LEN does not read
   past its terminating NUL.  */
8562 p1 = c_getstr (arg1);
8563 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
/* Convert the target-character ARG2 to a host char for the host memchr.  */
8569 if (target_char_cast (arg2, &c))
8572 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
/* Not found: fold to a null pointer of the argument's type.  */
8575 return build_int_cst (TREE_TYPE (arg1), 0);
/* Found: fold to ARG1 + offset of the match.  */
8577 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8579 return fold_convert_loc (loc, type, tem);
8585 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8586 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided listing -- braces and some returns hidden in gaps.  */
8589 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8591 const char *p1, *p2;
8593 if (!validate_arg (arg1, POINTER_TYPE)
8594 || !validate_arg (arg2, POINTER_TYPE)
8595 || !validate_arg (len, INTEGER_TYPE))
8598 /* If the LEN parameter is zero, return zero. */
8599 if (integer_zerop (len))
8600 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8603 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8604 if (operand_equal_p (arg1, arg2, 0))
8605 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8607 p1 = c_getstr (arg1);
8608 p2 = c_getstr (arg2);
8610 /* If all arguments are constant, and the value of len is not greater
8611 than the lengths of arg1 and arg2, evaluate at compile-time. */
8612 if (host_integerp (len, 1) && p1 && p2
8613 && compare_tree_int (len, strlen (p1) + 1) <= 0
8614 && compare_tree_int (len, strlen (p2) + 1) <= 0)
/* Normalize the host memcmp result to -1/0/1 so the folded constant does
   not depend on the host library.  */
8616 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8619 return integer_one_node;
8621 return integer_minus_one_node;
8623 return integer_zero_node;
8626 /* If len parameter is one, return an expression corresponding to
8627 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8628 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8630 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8631 tree cst_uchar_ptr_node
8632 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8635 = fold_convert_loc (loc, integer_type_node,
8636 build1 (INDIRECT_REF, cst_uchar_node,
8637 fold_convert_loc (loc,
8641 = fold_convert_loc (loc, integer_type_node,
8642 build1 (INDIRECT_REF, cst_uchar_node,
8643 fold_convert_loc (loc,
8646 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8652 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8653 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided listing -- braces and some returns hidden in gaps.  */
8656 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8658 const char *p1, *p2;
8660 if (!validate_arg (arg1, POINTER_TYPE)
8661 || !validate_arg (arg2, POINTER_TYPE))
8664 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8665 if (operand_equal_p (arg1, arg2, 0))
8666 return integer_zero_node;
8668 p1 = c_getstr (arg1);
8669 p2 = c_getstr (arg2);
/* Both literals: evaluate at compile time, normalized to -1/0/1.  */
8673 const int i = strcmp (p1, p2);
8675 return integer_minus_one_node;
8677 return integer_one_node;
8679 return integer_zero_node;
8682 /* If the second arg is "", return *(const unsigned char*)arg1. */
8683 if (p2 && *p2 == '\0')
8685 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8686 tree cst_uchar_ptr_node
8687 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8689 return fold_convert_loc (loc, integer_type_node,
8690 build1 (INDIRECT_REF, cst_uchar_node,
8691 fold_convert_loc (loc,
8696 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8697 if (p1 && *p1 == '\0')
8699 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8700 tree cst_uchar_ptr_node
8701 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8704 = fold_convert_loc (loc, integer_type_node,
8705 build1 (INDIRECT_REF, cst_uchar_node,
8706 fold_convert_loc (loc,
8709 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8715 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8716 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided listing -- braces and some returns hidden in gaps.  */
8719 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8721 const char *p1, *p2;
8723 if (!validate_arg (arg1, POINTER_TYPE)
8724 || !validate_arg (arg2, POINTER_TYPE)
8725 || !validate_arg (len, INTEGER_TYPE))
8728 /* If the LEN parameter is zero, return zero. */
8729 if (integer_zerop (len))
8730 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8733 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8734 if (operand_equal_p (arg1, arg2, 0))
8735 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8737 p1 = c_getstr (arg1);
8738 p2 = c_getstr (arg2);
/* Both literals with a constant LEN: fold at compile time, -1/0/1.  */
8740 if (host_integerp (len, 1) && p1 && p2)
8742 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8744 return integer_one_node;
8746 return integer_minus_one_node;
8748 return integer_zero_node;
8751 /* If the second arg is "", and the length is greater than zero,
8752 return *(const unsigned char*)arg1. */
8753 if (p2 && *p2 == '\0'
8754 && TREE_CODE (len) == INTEGER_CST
8755 && tree_int_cst_sgn (len) == 1)
8757 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8758 tree cst_uchar_ptr_node
8759 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8761 return fold_convert_loc (loc, integer_type_node,
8762 build1 (INDIRECT_REF, cst_uchar_node,
8763 fold_convert_loc (loc,
8768 /* If the first arg is "", and the length is greater than zero,
8769 return -*(const unsigned char*)arg2. */
8770 if (p1 && *p1 == '\0'
8771 && TREE_CODE (len) == INTEGER_CST
8772 && tree_int_cst_sgn (len) == 1)
8774 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8775 tree cst_uchar_ptr_node
8776 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8778 tree temp = fold_convert_loc (loc, integer_type_node,
8779 build1 (INDIRECT_REF, cst_uchar_node,
8780 fold_convert_loc (loc,
8783 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8786 /* If len parameter is one, return an expression corresponding to
8787 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8788 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8790 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8791 tree cst_uchar_ptr_node
8792 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8794 tree ind1 = fold_convert_loc (loc, integer_type_node,
8795 build1 (INDIRECT_REF, cst_uchar_node,
8796 fold_convert_loc (loc,
8799 tree ind2 = fold_convert_loc (loc, integer_type_node,
8800 build1 (INDIRECT_REF, cst_uchar_node,
8801 fold_convert_loc (loc,
8804 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8810 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8811 ARG. Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided listing -- braces and the final return hidden.  */
8814 fold_builtin_signbit (location_t loc, tree arg, tree type)
8818 if (!validate_arg (arg, REAL_TYPE))
8821 /* If ARG is a compile-time constant, determine the result. */
8822 if (TREE_CODE (arg) == REAL_CST
8823 && !TREE_OVERFLOW (arg))
8827 c = TREE_REAL_CST (arg);
8828 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8829 return fold_convert_loc (loc, type, temp);
8832 /* If ARG is non-negative, the result is always zero. */
8833 if (tree_expr_nonnegative_p (arg))
8834 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8836 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
/* With signed zeros, -0.0 < 0.0 is false but signbit(-0.0) is set, so the
   comparison rewrite is only valid in formats without signed zeros.  */
8837 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8838 return fold_build2_loc (loc, LT_EXPR, type, arg,
8839 build_real (TREE_TYPE (arg), dconst0));
8844 /* Fold function call to builtin copysign, copysignf or copysignl with
8845 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
/* NOTE(review): elided listing -- braces and the final return hidden.  */
8849 fold_builtin_copysign (location_t loc, tree fndecl,
8850 tree arg1, tree arg2, tree type)
8854 if (!validate_arg (arg1, REAL_TYPE)
8855 || !validate_arg (arg2, REAL_TYPE))
8858 /* copysign(X,X) is X. */
8859 if (operand_equal_p (arg1, arg2, 0))
8860 return fold_convert_loc (loc, type, arg1);
8862 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8863 if (TREE_CODE (arg1) == REAL_CST
8864 && TREE_CODE (arg2) == REAL_CST
8865 && !TREE_OVERFLOW (arg1)
8866 && !TREE_OVERFLOW (arg2))
8868 REAL_VALUE_TYPE c1, c2;
8870 c1 = TREE_REAL_CST (arg1);
8871 c2 = TREE_REAL_CST (arg2);
8872 /* c1.sign := c2.sign. */
8873 real_copysign (&c1, &c2);
8874 return build_real (type, c1);
8877 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8878 Remember to evaluate Y for side-effects. */
8879 if (tree_expr_nonnegative_p (arg2))
8880 return omit_one_operand_loc (loc, type,
8881 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8884 /* Strip sign changing operations for the first argument. */
/* copysign overwrites X's sign anyway, so e.g. a negation on ARG1 is
   redundant and can be stripped before rebuilding the call.  */
8885 tem = fold_strip_sign_ops (arg1);
8887 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8892 /* Fold a call to builtin isascii with argument ARG. */
8895 fold_builtin_isascii (location_t loc, tree arg)
8897 if (!validate_arg (arg, INTEGER_TYPE))
8901 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
/* Any bit above the low 7 set means the value is outside ASCII.  */
8902 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8903 build_int_cst (NULL_TREE,
8904 ~ (unsigned HOST_WIDE_INT) 0x7f));
8905 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8906 arg, integer_zero_node);
8910 /* Fold a call to builtin toascii with argument ARG. */
8913 fold_builtin_toascii (location_t loc, tree arg)
8915 if (!validate_arg (arg, INTEGER_TYPE))
8918 /* Transform toascii(c) -> (c & 0x7f). */
/* Mask to the low 7 bits, matching toascii's contract.  */
8919 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8920 build_int_cst (NULL_TREE, 0x7f));
8923 /* Fold a call to builtin isdigit with argument ARG. */
8926 fold_builtin_isdigit (location_t loc, tree arg)
8928 if (!validate_arg (arg, INTEGER_TYPE))
8932 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8933 /* According to the C standard, isdigit is unaffected by locale.
8934 However, it definitely is affected by the target character set. */
8935 unsigned HOST_WIDE_INT target_digit0
8936 = lang_hooks.to_target_charset ('0');
/* A zero return from the langhook signals the mapping is unavailable.  */
8938 if (target_digit0 == 0)
/* Unsigned subtract-and-compare folds the two range checks into one:
   values below '0' wrap to large unsigned numbers and fail <= 9.  */
8941 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8942 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8943 build_int_cst (unsigned_type_node, target_digit0));
8944 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8945 build_int_cst (unsigned_type_node, 9));
8949 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8952 fold_builtin_fabs (location_t loc, tree arg, tree type)
8954 if (!validate_arg (arg, REAL_TYPE))
8957 arg = fold_convert_loc (loc, type, arg);
/* Constant operand: compute |ARG| now; otherwise emit an ABS_EXPR.  */
8958 if (TREE_CODE (arg) == REAL_CST)
8959 return fold_abs_const (arg, type);
8960 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8963 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8966 fold_builtin_abs (location_t loc, tree arg, tree type)
8968 if (!validate_arg (arg, INTEGER_TYPE))
8971 arg = fold_convert_loc (loc, type, arg);
/* Integer analogue of fold_builtin_fabs above.  */
8972 if (TREE_CODE (arg) == INTEGER_CST)
8973 return fold_abs_const (arg, type);
8974 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8977 /* Fold a call to builtin fmin or fmax. */
/* MAX selects between fmax (true) and fmin (false) behavior.
   NOTE(review): elided listing -- braces/final return hidden in gaps.  */
8980 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8981 tree type, bool max)
8983 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8985 /* Calculate the result when the argument is a constant. */
8986 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8991 /* If either argument is NaN, return the other one. Avoid the
8992 transformation if we get (and honor) a signalling NaN. Using
8993 omit_one_operand() ensures we create a non-lvalue. */
8994 if (TREE_CODE (arg0) == REAL_CST
8995 && real_isnan (&TREE_REAL_CST (arg0))
8996 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8997 || ! TREE_REAL_CST (arg0).signalling))
8998 return omit_one_operand_loc (loc, type, arg1, arg0);
8999 if (TREE_CODE (arg1) == REAL_CST
9000 && real_isnan (&TREE_REAL_CST (arg1))
9001 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9002 || ! TREE_REAL_CST (arg1).signalling))
9003 return omit_one_operand_loc (loc, type, arg0, arg1);
9005 /* Transform fmin/fmax(x,x) -> x. */
9006 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9007 return omit_one_operand_loc (loc, type, arg0, arg1);
9009 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9010 functions to return the numeric arg if the other one is NaN.
9011 These tree codes don't honor that, so only transform if
9012 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9013 handled, so we don't have to worry about it either. */
9014 if (flag_finite_math_only)
9015 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9016 fold_convert_loc (loc, type, arg0),
9017 fold_convert_loc (loc, type, arg1));
9022 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9025 fold_builtin_carg (location_t loc, tree arg, tree type)
9027 if (validate_arg (arg, COMPLEX_TYPE)
9028 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9030 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
/* ARG is used for both its real and imaginary part; save it so its
   side effects are evaluated only once.  */
9034 tree new_arg = builtin_save_expr (arg);
9035 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9036 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
/* carg's angle convention: atan2 (imag, real).  */
9037 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9044 /* Fold a call to builtin logb/ilogb. */
/* RETTYPE distinguishes the two: real for logb, integer for ilogb.
   NOTE(review): elided listing -- the switch over the value class and
   several returns are hidden in the numbering gaps.  */
9047 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9049 if (! validate_arg (arg, REAL_TYPE))
9054 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9056 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9062 /* If arg is Inf or NaN and we're logb, return it. */
9063 if (TREE_CODE (rettype) == REAL_TYPE)
9064 return fold_convert_loc (loc, rettype, arg);
9065 /* Fall through... */
9067 /* Zero may set errno and/or raise an exception for logb, also
9068 for ilogb we don't know FP_ILOGB0. */
9071 /* For normal numbers, proceed iff radix == 2. In GCC,
9072 normalized significands are in the range [0.5, 1.0). We
9073 want the exponent as if they were [1.0, 2.0) so get the
9074 exponent and subtract 1. */
9075 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9076 return fold_convert_loc (loc, rettype,
9077 build_int_cst (NULL_TREE,
9078 REAL_EXP (value)-1));
9086 /* Fold a call to builtin significand, if radix == 2. */
/* NOTE(review): elided listing -- the value-class switch and the final
   return are hidden in the numbering gaps.  */
9089 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9091 if (! validate_arg (arg, REAL_TYPE))
9096 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9098 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9105 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9106 return fold_convert_loc (loc, rettype, arg);
9108 /* For normal numbers, proceed iff radix == 2. */
9109 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9111 REAL_VALUE_TYPE result = *value;
9112 /* In GCC, normalized significands are in the range [0.5,
9113 1.0). We want them to be [1.0, 2.0) so set the
9115 SET_REAL_EXP (&result, 1);
9116 return build_real (rettype, result);
9125 /* Fold a call to builtin frexp, we can assume the base is 2. */
/* Folds frexp (ARG0, ARG1) into (*ARG1 = exp, frac) when ARG0 is constant.
   NOTE(review): elided listing -- the value-class switch and some braces
   are hidden in the numbering gaps.  */
9128 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9130 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9135 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9138 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9140 /* Proceed if a valid pointer type was passed in. */
9141 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9143 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9149 /* For +-0, return (*exp = 0, +-0). */
9150 exp = integer_zero_node;
9155 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9156 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9159 /* Since the frexp function always expects base 2, and in
9160 GCC normalized significands are already in the range
9161 [0.5, 1.0), we have exactly what frexp wants. */
9162 REAL_VALUE_TYPE frac_rvt = *value;
9163 SET_REAL_EXP (&frac_rvt, 0);
9164 frac = build_real (rettype, frac_rvt);
9165 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9172 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9173 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
/* Mark the store so later folding does not drop the *ARG1 side effect.  */
9174 TREE_SIDE_EFFECTS (arg1) = 1;
9175 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9181 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9182 then we can assume the base is two. If it's false, then we have to
9183 check the mode of the TYPE parameter in certain cases. */
/* NOTE(review): elided listing -- braces and the final return hidden.  */
9186 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9187 tree type, bool ldexp)
9189 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9194 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9195 if (real_zerop (arg0) || integer_zerop (arg1)
9196 || (TREE_CODE (arg0) == REAL_CST
9197 && !real_isfinite (&TREE_REAL_CST (arg0))))
9198 return omit_one_operand_loc (loc, type, arg0, arg1);
9200 /* If both arguments are constant, then try to evaluate it. */
/* For scalbn/scalbln (LDEXP false) the fold is only valid when the
   type's radix is 2, since REAL_EXP manipulation assumes base 2.  */
9201 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9202 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9203 && host_integerp (arg1, 0))
9205 /* Bound the maximum adjustment to twice the range of the
9206 mode's valid exponents. Use abs to ensure the range is
9207 positive as a sanity check. */
9208 const long max_exp_adj = 2 *
9209 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9210 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9212 /* Get the user-requested adjustment. */
9213 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9215 /* The requested adjustment must be inside this range. This
9216 is a preliminary cap to avoid things like overflow, we
9217 may still fail to compute the result for other reasons. */
9218 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9220 REAL_VALUE_TYPE initial_result;
9222 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9224 /* Ensure we didn't overflow. */
9225 if (! real_isinf (&initial_result))
9227 const REAL_VALUE_TYPE trunc_result
9228 = real_value_truncate (TYPE_MODE (type), initial_result);
9230 /* Only proceed if the target mode can hold the
9232 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9233 return build_real (type, trunc_result);
9242 /* Fold a call to builtin modf. */
/* Splits constant ARG0 into integral and fractional parts:
   returns (*ARG1 = trunc(ARG0), frac) as a COMPOUND_EXPR.
   NOTE(review): the rvc_* class switch structure is elided here; the
   visible comments mark its NaN/zero, Inf, and normal arms.  */
9245 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9247 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
/* Only constant, non-overflowed reals fold at compile time.  */
9252 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9255 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9257 /* Proceed if a valid pointer type was passed in. */
/* modf stores the integral part through a pointer of the same real type
   as its result, hence the main-variant comparison against RETTYPE.  */
9258 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9260 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9261 REAL_VALUE_TYPE trunc, frac;
9267 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9268 trunc = frac = *value;
9271 /* For +-Inf, return (*arg1 = arg0, +-0). */
/* The fractional part keeps the sign of the infinity.  */
9273 frac.sign = value->sign;
9277 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9278 real_trunc (&trunc, VOIDmode, value);
9279 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9280 /* If the original number was negative and already
9281 integral, then the fractional part is -0.0. */
9282 if (value->sign && frac.cl == rvc_zero)
9283 frac.sign = value->sign;
9287 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
/* Mark the store so later passes don't discard it.  */
9288 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9289 build_real (rettype, trunc));
9290 TREE_SIDE_EFFECTS (arg1) = 1;
9291 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9292 build_real (rettype, frac));
9298 /* Given a location LOC, an interclass builtin function decl FNDECL
9299 and its single argument ARG, return an folded expression computing
9300 the same, or NULL_TREE if we either couldn't or didn't want to fold
9301 (the latter happen if there's an RTL instruction available). */
/* Expands isinf/isfinite/isnormal into comparisons against the mode's
   extreme values when no dedicated insn exists.  NOTE(review): switch
   header, braces, local decls of BUF/R/RESULT, and the result-return
   tail are elided in this excerpt.  */
9304 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9306 enum machine_mode mode;
9308 if (!validate_arg (arg, REAL_TYPE))
/* Prefer the target's native instruction over a generic expansion.  */
9311 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9314 mode = TYPE_MODE (TREE_TYPE (arg));
9316 /* If there is no optab, try generic code. */
9317 switch (DECL_FUNCTION_CODE (fndecl))
9321 CASE_FLT_FN (BUILT_IN_ISINF):
9323 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9324 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9325 tree const type = TREE_TYPE (arg);
/* Build the mode's largest finite value from its hex string form.  */
9329 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9330 real_from_string (&r, buf);
9331 result = build_call_expr (isgr_fn, 2,
9332 fold_build1_loc (loc, ABS_EXPR, type, arg),
9333 build_real (type, r));
9336 CASE_FLT_FN (BUILT_IN_FINITE):
9337 case BUILT_IN_ISFINITE:
9339 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9340 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9341 tree const type = TREE_TYPE (arg);
9345 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9346 real_from_string (&r, buf);
9347 result = build_call_expr (isle_fn, 2,
9348 fold_build1_loc (loc, ABS_EXPR, type, arg),
9349 build_real (type, r));
9350 /*result = fold_build2_loc (loc, UNGT_EXPR,
9351 TREE_TYPE (TREE_TYPE (fndecl)),
9352 fold_build1_loc (loc, ABS_EXPR, type, arg),
9353 build_real (type, r));
9354 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9355 TREE_TYPE (TREE_TYPE (fndecl)),
9359 case BUILT_IN_ISNORMAL:
9361 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9362 islessequal(fabs(x),DBL_MAX). */
9363 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9364 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9365 tree const type = TREE_TYPE (arg);
9366 REAL_VALUE_TYPE rmax, rmin;
/* rmin is the smallest normal: 0x1p(emin-1) in the mode's format.  */
9369 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9370 real_from_string (&rmax, buf);
9371 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9372 real_from_string (&rmin, buf);
/* ARG is used twice below, so save it to avoid double evaluation.  */
9373 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9374 result = build_call_expr (isle_fn, 2, arg,
9375 build_real (type, rmax));
9376 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9377 build_call_expr (isge_fn, 2, arg,
9378 build_real (type, rmin)));
9388 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9389 ARG is the argument for the call. */
/* BUILTIN_INDEX selects which classification to fold; constant arguments
   fold to integer constants, otherwise generic comparisons are built.
   NOTE(review): braces, breaks and some returns are elided here.  */
9392 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9394 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9397 if (!validate_arg (arg, REAL_TYPE))
9400 switch (builtin_index)
9402 case BUILT_IN_ISINF:
/* Without Inf in the mode, isinf is statically 0 (keep ARG for effects).  */
9403 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9404 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9406 if (TREE_CODE (arg) == REAL_CST)
9408 r = TREE_REAL_CST (arg);
/* +Inf folds to 1, -Inf to -1, anything else to 0.  */
9409 if (real_isinf (&r))
9410 return real_compare (GT_EXPR, &r, &dconst0)
9411 ? integer_one_node : integer_minus_one_node;
9413 return integer_zero_node;
9418 case BUILT_IN_ISINF_SIGN:
9420 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9421 /* In a boolean context, GCC will fold the inner COND_EXPR to
9422 1. So e.g. "if (isinf_sign(x))" would be folded to just
9423 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9424 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9425 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9426 tree tmp = NULL_TREE;
/* ARG feeds two calls below; save it to evaluate it only once.  */
9428 arg = builtin_save_expr (arg);
9430 if (signbit_fn && isinf_fn)
9432 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9433 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
/* Normalize both calls to 0/1 booleans before combining.  */
9435 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9436 signbit_call, integer_zero_node);
9437 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9438 isinf_call, integer_zero_node);
9440 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9441 integer_minus_one_node, integer_one_node);
9442 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9450 case BUILT_IN_ISFINITE:
/* If neither NaN nor Inf exist in the mode, everything is finite.  */
9451 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9452 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9453 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9455 if (TREE_CODE (arg) == REAL_CST)
9457 r = TREE_REAL_CST (arg);
9458 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9463 case BUILT_IN_ISNAN:
9464 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9465 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9467 if (TREE_CODE (arg) == REAL_CST)
9469 r = TREE_REAL_CST (arg);
9470 return real_isnan (&r) ? integer_one_node : integer_zero_node;
/* Generic isnan: x != x, expressed as UNORDERED (x, x).  */
9473 arg = builtin_save_expr (arg);
9474 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9481 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9482 This builtin will generate code to return the appropriate floating
9483 point classification depending on the value of the floating point
9484 number passed in. The possible return values must be supplied as
9485 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9486 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9487 one floating point argument which is "type generic". */
/* Builds the classification as nested COND_EXPRs, from the innermost
   (zero vs subnormal) outward to the NaN test.  NOTE(review): the final
   "return res;" and some brace lines are elided in this excerpt.  */
9490 fold_builtin_fpclassify (location_t loc, tree exp)
9492 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9493 arg, type, res, tmp;
9494 enum machine_mode mode;
9498 /* Verify the required arguments in the original call. */
9499 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9500 INTEGER_TYPE, INTEGER_TYPE,
9501 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
/* Peel off the five classification codes and the tested value.  */
9504 fp_nan = CALL_EXPR_ARG (exp, 0);
9505 fp_infinite = CALL_EXPR_ARG (exp, 1);
9506 fp_normal = CALL_EXPR_ARG (exp, 2);
9507 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9508 fp_zero = CALL_EXPR_ARG (exp, 4);
9509 arg = CALL_EXPR_ARG (exp, 5);
9510 type = TREE_TYPE (arg);
9511 mode = TYPE_MODE (type);
/* All comparisons are against fabs(x); save to evaluate ARG once.  */
9512 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9516 (fabs(x) == Inf ? FP_INFINITE :
9517 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9518 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9520 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9521 build_real (type, dconst0));
9522 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9523 tmp, fp_zero, fp_subnormal);
/* Smallest normal value of the mode: 0x1p(emin-1).  */
9525 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9526 real_from_string (&r, buf);
9527 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9528 arg, build_real (type, r));
9529 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
/* Only test for Inf/NaN when the mode actually honors them.  */
9531 if (HONOR_INFINITIES (mode))
9534 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9535 build_real (type, r));
9536 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9540 if (HONOR_NANS (mode))
/* ORDERED(x,x) is false exactly for NaN, so NaN takes the else arm.  */
9542 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9543 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9549 /* Fold a call to an unordered comparison function such as
9550 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9551 being called and ARG0 and ARG1 are the arguments for the call.
9552 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9553 the opposite of the desired result. UNORDERED_CODE is used
9554 for modes that can hold NaNs and ORDERED_CODE is used for
/* ... modes that cannot.  The builtin is implemented as
   !(opposite comparison), hence the TRUTH_NOT_EXPR at the end.  */
9558 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9559 enum tree_code unordered_code,
9560 enum tree_code ordered_code)
9562 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9563 enum tree_code code;
9565 enum tree_code code0, code1;
9566 tree cmp_type = NULL_TREE;
9568 type0 = TREE_TYPE (arg0);
9569 type1 = TREE_TYPE (arg1);
9571 code0 = TREE_CODE (type0);
9572 code1 = TREE_CODE (type1);
/* Pick a common comparison type: the wider real, or the real one
   when a real is mixed with an integer.  */
9574 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9575 /* Choose the wider of two real types. */
9576 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9578 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9580 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9583 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9584 arg1 = fold_convert_loc (loc, cmp_type, arg1);
/* isunordered itself is not inverted: it is false when the mode
   cannot represent NaN, otherwise a direct UNORDERED_EXPR.  */
9586 if (unordered_code == UNORDERED_EXPR)
9588 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9589 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9590 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9593 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9595 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9596 fold_build2_loc (loc, code, type, arg0, arg1));
9599 /* Fold a call to built-in function FNDECL with 0 arguments.
9600 IGNORE is true if the result of the function call is ignored. This
9601 function returns NULL_TREE if no simplification was possible. */
/* Handles the zero-argument builtins: the infinity constants and
   __builtin_classify_type with no argument.  */
9604 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9606 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9607 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* inf()/infd*() warn when the mode has no Inf (WARN true);
   HUGE_VAL never warns (WARN false).  */
9610 CASE_FLT_FN (BUILT_IN_INF):
9611 case BUILT_IN_INFD32:
9612 case BUILT_IN_INFD64:
9613 case BUILT_IN_INFD128:
9614 return fold_builtin_inf (loc, type, true);
9616 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9617 return fold_builtin_inf (loc, type, false);
9619 case BUILT_IN_CLASSIFY_TYPE:
/* No argument: classify NULL_TREE (the "no type" case).  */
9620 return fold_builtin_classify_type (NULL_TREE);
9628 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9629 IGNORE is true if the result of the function call is ignored. This
9630 function returns NULL_TREE if no simplification was possible. */
/* Central one-argument dispatcher: each case either delegates to a
   dedicated fold_builtin_* helper or, for math functions on constant
   arguments, evaluates via MPFR/MPC (do_mpfr_arg1 / do_mpc_arg1).
   NOTE(review): switch header, break statements and closing braces are
   elided in this excerpt.  */
9633 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9635 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9636 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9640 case BUILT_IN_CONSTANT_P:
9642 tree val = fold_builtin_constant_p (arg0);
9644 /* Gimplification will pull the CALL_EXPR for the builtin out of
9645 an if condition. When not optimizing, we'll not CSE it back.
9646 To avoid link error types of regressions, return false now. */
9647 if (!val && !optimize)
9648 val = integer_zero_node;
9653 case BUILT_IN_CLASSIFY_TYPE:
9654 return fold_builtin_classify_type (arg0);
9656 case BUILT_IN_STRLEN:
9657 return fold_builtin_strlen (loc, type, arg0);
9659 CASE_FLT_FN (BUILT_IN_FABS):
9660 return fold_builtin_fabs (loc, arg0, type);
9664 case BUILT_IN_LLABS:
9665 case BUILT_IN_IMAXABS:
9666 return fold_builtin_abs (loc, arg0, type);
/* Complex builtins: each requires a COMPLEX_TYPE argument whose
   component type is real before folding.  */
9668 CASE_FLT_FN (BUILT_IN_CONJ):
9669 if (validate_arg (arg0, COMPLEX_TYPE)
9670 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9671 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9674 CASE_FLT_FN (BUILT_IN_CREAL):
9675 if (validate_arg (arg0, COMPLEX_TYPE)
9676 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9677 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9680 CASE_FLT_FN (BUILT_IN_CIMAG):
9681 if (validate_arg (arg0, COMPLEX_TYPE)
9682 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9683 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9686 CASE_FLT_FN (BUILT_IN_CCOS):
9687 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9689 CASE_FLT_FN (BUILT_IN_CCOSH):
9690 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
/* Constant complex math is evaluated with the MPC library.  */
9692 CASE_FLT_FN (BUILT_IN_CSIN):
9693 if (validate_arg (arg0, COMPLEX_TYPE)
9694 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9695 return do_mpc_arg1 (arg0, type, mpc_sin);
9698 CASE_FLT_FN (BUILT_IN_CSINH):
9699 if (validate_arg (arg0, COMPLEX_TYPE)
9700 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9701 return do_mpc_arg1 (arg0, type, mpc_sinh);
9704 CASE_FLT_FN (BUILT_IN_CTAN):
9705 if (validate_arg (arg0, COMPLEX_TYPE)
9706 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9707 return do_mpc_arg1 (arg0, type, mpc_tan);
9710 CASE_FLT_FN (BUILT_IN_CTANH):
9711 if (validate_arg (arg0, COMPLEX_TYPE)
9712 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9713 return do_mpc_arg1 (arg0, type, mpc_tanh);
9716 CASE_FLT_FN (BUILT_IN_CLOG):
9717 if (validate_arg (arg0, COMPLEX_TYPE)
9718 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9719 return do_mpc_arg1 (arg0, type, mpc_log);
9722 CASE_FLT_FN (BUILT_IN_CSQRT):
9723 if (validate_arg (arg0, COMPLEX_TYPE)
9724 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9725 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9728 CASE_FLT_FN (BUILT_IN_CASIN):
9729 if (validate_arg (arg0, COMPLEX_TYPE)
9730 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9731 return do_mpc_arg1 (arg0, type, mpc_asin);
9734 CASE_FLT_FN (BUILT_IN_CACOS):
9735 if (validate_arg (arg0, COMPLEX_TYPE)
9736 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9737 return do_mpc_arg1 (arg0, type, mpc_acos);
9740 CASE_FLT_FN (BUILT_IN_CATAN):
9741 if (validate_arg (arg0, COMPLEX_TYPE)
9742 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9743 return do_mpc_arg1 (arg0, type, mpc_atan);
9746 CASE_FLT_FN (BUILT_IN_CASINH):
9747 if (validate_arg (arg0, COMPLEX_TYPE)
9748 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9749 return do_mpc_arg1 (arg0, type, mpc_asinh);
9752 CASE_FLT_FN (BUILT_IN_CACOSH):
9753 if (validate_arg (arg0, COMPLEX_TYPE)
9754 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9755 return do_mpc_arg1 (arg0, type, mpc_acosh);
9758 CASE_FLT_FN (BUILT_IN_CATANH):
9759 if (validate_arg (arg0, COMPLEX_TYPE)
9760 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9761 return do_mpc_arg1 (arg0, type, mpc_atanh);
9764 CASE_FLT_FN (BUILT_IN_CABS):
9765 return fold_builtin_cabs (loc, arg0, type, fndecl);
9767 CASE_FLT_FN (BUILT_IN_CARG):
9768 return fold_builtin_carg (loc, arg0, type);
9770 CASE_FLT_FN (BUILT_IN_SQRT):
9771 return fold_builtin_sqrt (loc, arg0, type);
9773 CASE_FLT_FN (BUILT_IN_CBRT):
9774 return fold_builtin_cbrt (loc, arg0, type);
/* Real math on constants via MPFR.  The trailing &dconst* arguments
   bound the valid input domain (inclusive/exclusive per last flag).  */
9776 CASE_FLT_FN (BUILT_IN_ASIN):
9777 if (validate_arg (arg0, REAL_TYPE))
9778 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9779 &dconstm1, &dconst1, true);
9782 CASE_FLT_FN (BUILT_IN_ACOS):
9783 if (validate_arg (arg0, REAL_TYPE))
9784 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9785 &dconstm1, &dconst1, true);
9788 CASE_FLT_FN (BUILT_IN_ATAN):
9789 if (validate_arg (arg0, REAL_TYPE))
9790 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9793 CASE_FLT_FN (BUILT_IN_ASINH):
9794 if (validate_arg (arg0, REAL_TYPE))
9795 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9798 CASE_FLT_FN (BUILT_IN_ACOSH):
9799 if (validate_arg (arg0, REAL_TYPE))
9800 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9801 &dconst1, NULL, true);
9804 CASE_FLT_FN (BUILT_IN_ATANH):
9805 if (validate_arg (arg0, REAL_TYPE))
9806 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9807 &dconstm1, &dconst1, false);
9810 CASE_FLT_FN (BUILT_IN_SIN):
9811 if (validate_arg (arg0, REAL_TYPE))
9812 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9815 CASE_FLT_FN (BUILT_IN_COS):
9816 return fold_builtin_cos (loc, arg0, type, fndecl);
9818 CASE_FLT_FN (BUILT_IN_TAN):
9819 return fold_builtin_tan (arg0, type);
9821 CASE_FLT_FN (BUILT_IN_CEXP):
9822 return fold_builtin_cexp (loc, arg0, type);
9824 CASE_FLT_FN (BUILT_IN_CEXPI):
9825 if (validate_arg (arg0, REAL_TYPE))
9826 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9829 CASE_FLT_FN (BUILT_IN_SINH):
9830 if (validate_arg (arg0, REAL_TYPE))
9831 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9834 CASE_FLT_FN (BUILT_IN_COSH):
9835 return fold_builtin_cosh (loc, arg0, type, fndecl);
9837 CASE_FLT_FN (BUILT_IN_TANH):
9838 if (validate_arg (arg0, REAL_TYPE))
9839 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9842 CASE_FLT_FN (BUILT_IN_ERF):
9843 if (validate_arg (arg0, REAL_TYPE))
9844 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9847 CASE_FLT_FN (BUILT_IN_ERFC):
9848 if (validate_arg (arg0, REAL_TYPE))
9849 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9852 CASE_FLT_FN (BUILT_IN_TGAMMA):
9853 if (validate_arg (arg0, REAL_TYPE))
9854 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9857 CASE_FLT_FN (BUILT_IN_EXP):
9858 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9860 CASE_FLT_FN (BUILT_IN_EXP2):
9861 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9863 CASE_FLT_FN (BUILT_IN_EXP10):
9864 CASE_FLT_FN (BUILT_IN_POW10):
9865 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9867 CASE_FLT_FN (BUILT_IN_EXPM1):
9868 if (validate_arg (arg0, REAL_TYPE))
9869 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9872 CASE_FLT_FN (BUILT_IN_LOG):
9873 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9875 CASE_FLT_FN (BUILT_IN_LOG2):
9876 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9878 CASE_FLT_FN (BUILT_IN_LOG10):
9879 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9881 CASE_FLT_FN (BUILT_IN_LOG1P):
9882 if (validate_arg (arg0, REAL_TYPE))
9883 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9884 &dconstm1, NULL, false);
9887 CASE_FLT_FN (BUILT_IN_J0):
9888 if (validate_arg (arg0, REAL_TYPE))
9889 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9893 CASE_FLT_FN (BUILT_IN_J1):
9894 if (validate_arg (arg0, REAL_TYPE))
9895 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9899 CASE_FLT_FN (BUILT_IN_Y0):
9900 if (validate_arg (arg0, REAL_TYPE))
9901 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9902 &dconst0, NULL, false);
9905 CASE_FLT_FN (BUILT_IN_Y1):
9906 if (validate_arg (arg0, REAL_TYPE))
9907 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9908 &dconst0, NULL, false);
/* nan()/nand*() build a quiet NaN (true), nans() a signaling one.  */
9911 CASE_FLT_FN (BUILT_IN_NAN):
9912 case BUILT_IN_NAND32:
9913 case BUILT_IN_NAND64:
9914 case BUILT_IN_NAND128:
9915 return fold_builtin_nan (arg0, type, true);
9917 CASE_FLT_FN (BUILT_IN_NANS):
9918 return fold_builtin_nan (arg0, type, false);
9920 CASE_FLT_FN (BUILT_IN_FLOOR):
9921 return fold_builtin_floor (loc, fndecl, arg0);
9923 CASE_FLT_FN (BUILT_IN_CEIL):
9924 return fold_builtin_ceil (loc, fndecl, arg0);
9926 CASE_FLT_FN (BUILT_IN_TRUNC):
9927 return fold_builtin_trunc (loc, fndecl, arg0);
9929 CASE_FLT_FN (BUILT_IN_ROUND):
9930 return fold_builtin_round (loc, fndecl, arg0);
9932 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9933 CASE_FLT_FN (BUILT_IN_RINT):
9934 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9936 CASE_FLT_FN (BUILT_IN_LCEIL):
9937 CASE_FLT_FN (BUILT_IN_LLCEIL):
9938 CASE_FLT_FN (BUILT_IN_LFLOOR):
9939 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9940 CASE_FLT_FN (BUILT_IN_LROUND):
9941 CASE_FLT_FN (BUILT_IN_LLROUND):
9942 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9944 CASE_FLT_FN (BUILT_IN_LRINT):
9945 CASE_FLT_FN (BUILT_IN_LLRINT):
9946 return fold_fixed_mathfn (loc, fndecl, arg0);
9948 case BUILT_IN_BSWAP32:
9949 case BUILT_IN_BSWAP64:
9950 return fold_builtin_bswap (fndecl, arg0);
9952 CASE_INT_FN (BUILT_IN_FFS):
9953 CASE_INT_FN (BUILT_IN_CLZ):
9954 CASE_INT_FN (BUILT_IN_CTZ):
9955 CASE_INT_FN (BUILT_IN_POPCOUNT):
9956 CASE_INT_FN (BUILT_IN_PARITY):
9957 return fold_builtin_bitop (fndecl, arg0);
9959 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9960 return fold_builtin_signbit (loc, arg0, type);
9962 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9963 return fold_builtin_significand (loc, arg0, type);
9965 CASE_FLT_FN (BUILT_IN_ILOGB):
9966 CASE_FLT_FN (BUILT_IN_LOGB):
9967 return fold_builtin_logb (loc, arg0, type);
9969 case BUILT_IN_ISASCII:
9970 return fold_builtin_isascii (loc, arg0);
9972 case BUILT_IN_TOASCII:
9973 return fold_builtin_toascii (loc, arg0);
9975 case BUILT_IN_ISDIGIT:
9976 return fold_builtin_isdigit (loc, arg0);
/* Classification builtins: try constant folding first, then fall back
   to the generic interclass expansion.  */
9978 CASE_FLT_FN (BUILT_IN_FINITE):
9979 case BUILT_IN_FINITED32:
9980 case BUILT_IN_FINITED64:
9981 case BUILT_IN_FINITED128:
9982 case BUILT_IN_ISFINITE:
9984 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9987 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9990 CASE_FLT_FN (BUILT_IN_ISINF):
9991 case BUILT_IN_ISINFD32:
9992 case BUILT_IN_ISINFD64:
9993 case BUILT_IN_ISINFD128:
9995 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9998 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10001 case BUILT_IN_ISNORMAL:
10002 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10004 case BUILT_IN_ISINF_SIGN:
10005 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10007 CASE_FLT_FN (BUILT_IN_ISNAN):
10008 case BUILT_IN_ISNAND32:
10009 case BUILT_IN_ISNAND64:
10010 case BUILT_IN_ISNAND128:
10011 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10013 case BUILT_IN_PRINTF:
10014 case BUILT_IN_PRINTF_UNLOCKED:
10015 case BUILT_IN_VPRINTF:
10016 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10026 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10027 IGNORE is true if the result of the function call is ignored. This
10028 function returns NULL_TREE if no simplification was possible. */
/* Two-argument dispatcher: math functions fold via MPFR/MPC on constant
   arguments, string/memory builtins via their dedicated helpers.
   NOTE(review): switch header, breaks and closing braces are elided in
   this excerpt.  */
10031 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10033 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10034 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10038 CASE_FLT_FN (BUILT_IN_JN):
10039 if (validate_arg (arg0, INTEGER_TYPE)
10040 && validate_arg (arg1, REAL_TYPE))
10041 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10044 CASE_FLT_FN (BUILT_IN_YN):
10045 if (validate_arg (arg0, INTEGER_TYPE)
10046 && validate_arg (arg1, REAL_TYPE))
10047 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10051 CASE_FLT_FN (BUILT_IN_DREM):
10052 CASE_FLT_FN (BUILT_IN_REMAINDER):
10053 if (validate_arg (arg0, REAL_TYPE)
10054 && validate_arg(arg1, REAL_TYPE))
10055 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10058 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10059 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10060 if (validate_arg (arg0, REAL_TYPE)
10061 && validate_arg(arg1, POINTER_TYPE))
10062 return do_mpfr_lgamma_r (arg0, arg1, type);
10065 CASE_FLT_FN (BUILT_IN_ATAN2):
10066 if (validate_arg (arg0, REAL_TYPE)
10067 && validate_arg(arg1, REAL_TYPE))
10068 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10071 CASE_FLT_FN (BUILT_IN_FDIM):
10072 if (validate_arg (arg0, REAL_TYPE)
10073 && validate_arg(arg1, REAL_TYPE))
10074 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10077 CASE_FLT_FN (BUILT_IN_HYPOT):
10078 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10080 CASE_FLT_FN (BUILT_IN_CPOW):
10081 if (validate_arg (arg0, COMPLEX_TYPE)
10082 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10083 && validate_arg (arg1, COMPLEX_TYPE)
10084 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10085 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
/* ldexp may always assume base 2; scalbn/scalbln must check the
   target mode's radix (see fold_builtin_load_exponent).  */
10088 CASE_FLT_FN (BUILT_IN_LDEXP):
10089 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10090 CASE_FLT_FN (BUILT_IN_SCALBN):
10091 CASE_FLT_FN (BUILT_IN_SCALBLN):
10092 return fold_builtin_load_exponent (loc, arg0, arg1,
10093 type, /*ldexp=*/false);
10095 CASE_FLT_FN (BUILT_IN_FREXP):
10096 return fold_builtin_frexp (loc, arg0, arg1, type);
10098 CASE_FLT_FN (BUILT_IN_MODF):
10099 return fold_builtin_modf (loc, arg0, arg1, type);
10101 case BUILT_IN_BZERO:
10102 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10104 case BUILT_IN_FPUTS:
10105 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10107 case BUILT_IN_FPUTS_UNLOCKED:
10108 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10110 case BUILT_IN_STRSTR:
10111 return fold_builtin_strstr (loc, arg0, arg1, type);
10113 case BUILT_IN_STRCAT:
10114 return fold_builtin_strcat (loc, arg0, arg1);
10116 case BUILT_IN_STRSPN:
10117 return fold_builtin_strspn (loc, arg0, arg1);
10119 case BUILT_IN_STRCSPN:
10120 return fold_builtin_strcspn (loc, arg0, arg1);
10122 case BUILT_IN_STRCHR:
10123 case BUILT_IN_INDEX:
10124 return fold_builtin_strchr (loc, arg0, arg1, type);
10126 case BUILT_IN_STRRCHR:
10127 case BUILT_IN_RINDEX:
10128 return fold_builtin_strrchr (loc, arg0, arg1, type);
10130 case BUILT_IN_STRCPY:
10131 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10133 case BUILT_IN_STPCPY:
/* When the result is unused, stpcpy degrades to strcpy if that
   builtin is available implicitly.  */
10136 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10140 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10143 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10146 case BUILT_IN_STRCMP:
10147 return fold_builtin_strcmp (loc, arg0, arg1);
10149 case BUILT_IN_STRPBRK:
10150 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10152 case BUILT_IN_EXPECT:
10153 return fold_builtin_expect (loc, arg0, arg1);
10155 CASE_FLT_FN (BUILT_IN_POW):
10156 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10158 CASE_FLT_FN (BUILT_IN_POWI):
10159 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10161 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10162 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10164 CASE_FLT_FN (BUILT_IN_FMIN):
10165 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10167 CASE_FLT_FN (BUILT_IN_FMAX):
10168 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
/* Unordered comparisons are folded as the negation of the opposite
   comparison (see fold_builtin_unordered_cmp).  */
10170 case BUILT_IN_ISGREATER:
10171 return fold_builtin_unordered_cmp (loc, fndecl,
10172 arg0, arg1, UNLE_EXPR, LE_EXPR);
10173 case BUILT_IN_ISGREATEREQUAL:
10174 return fold_builtin_unordered_cmp (loc, fndecl,
10175 arg0, arg1, UNLT_EXPR, LT_EXPR);
10176 case BUILT_IN_ISLESS:
10177 return fold_builtin_unordered_cmp (loc, fndecl,
10178 arg0, arg1, UNGE_EXPR, GE_EXPR);
10179 case BUILT_IN_ISLESSEQUAL:
10180 return fold_builtin_unordered_cmp (loc, fndecl,
10181 arg0, arg1, UNGT_EXPR, GT_EXPR);
10182 case BUILT_IN_ISLESSGREATER:
10183 return fold_builtin_unordered_cmp (loc, fndecl,
10184 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10185 case BUILT_IN_ISUNORDERED:
10186 return fold_builtin_unordered_cmp (loc, fndecl,
10187 arg0, arg1, UNORDERED_EXPR,
10190 /* We do the folding for va_start in the expander. */
10191 case BUILT_IN_VA_START:
10194 case BUILT_IN_SPRINTF:
10195 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10197 case BUILT_IN_OBJECT_SIZE:
10198 return fold_builtin_object_size (arg0, arg1);
10200 case BUILT_IN_PRINTF:
10201 case BUILT_IN_PRINTF_UNLOCKED:
10202 case BUILT_IN_VPRINTF:
10203 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
/* _chk variants carry a leading flag argument; it must be a plain
   side-effect-free integer before it can be dropped.  */
10205 case BUILT_IN_PRINTF_CHK:
10206 case BUILT_IN_VPRINTF_CHK:
10207 if (!validate_arg (arg0, INTEGER_TYPE)
10208 || TREE_SIDE_EFFECTS (arg0))
10211 return fold_builtin_printf (loc, fndecl,
10212 arg1, NULL_TREE, ignore, fcode);
10215 case BUILT_IN_FPRINTF:
10216 case BUILT_IN_FPRINTF_UNLOCKED:
10217 case BUILT_IN_VFPRINTF:
10218 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10227 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10228 and ARG2. IGNORE is true if the result of the function call is ignored.
10229 This function returns NULL_TREE if no simplification was possible. */
/* Three-argument dispatcher.  NOTE(review): switch header, breaks and
   closing braces are elided in this excerpt.  */
10232 fold_builtin_3 (location_t loc, tree fndecl,
10233 tree arg0, tree arg1, tree arg2, bool ignore)
10235 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10236 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10240 CASE_FLT_FN (BUILT_IN_SINCOS):
10241 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10243 CASE_FLT_FN (BUILT_IN_FMA):
10244 if (validate_arg (arg0, REAL_TYPE)
10245 && validate_arg(arg1, REAL_TYPE)
10246 && validate_arg(arg2, REAL_TYPE))
10247 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10250 CASE_FLT_FN (BUILT_IN_REMQUO):
10251 if (validate_arg (arg0, REAL_TYPE)
10252 && validate_arg(arg1, REAL_TYPE)
10253 && validate_arg(arg2, POINTER_TYPE))
10254 return do_mpfr_remquo (arg0, arg1, arg2);
10257 case BUILT_IN_MEMSET:
10258 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
/* bcopy(src, dst, n) swaps its pointer arguments relative to memmove,
   hence arg1/arg0 order; endp=3 means memmove semantics.  */
10260 case BUILT_IN_BCOPY:
10261 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10262 void_type_node, true, /*endp=*/3);
10264 case BUILT_IN_MEMCPY:
10265 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10266 type, ignore, /*endp=*/0);
10268 case BUILT_IN_MEMPCPY:
10269 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10270 type, ignore, /*endp=*/1);
10272 case BUILT_IN_MEMMOVE:
10273 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10274 type, ignore, /*endp=*/3);
10276 case BUILT_IN_STRNCAT:
10277 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10279 case BUILT_IN_STRNCPY:
10280 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10282 case BUILT_IN_STRNCMP:
10283 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10285 case BUILT_IN_MEMCHR:
10286 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10288 case BUILT_IN_BCMP:
10289 case BUILT_IN_MEMCMP:
10290 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10292 case BUILT_IN_SPRINTF:
10293 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10295 case BUILT_IN_STRCPY_CHK:
10296 case BUILT_IN_STPCPY_CHK:
10297 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10300 case BUILT_IN_STRCAT_CHK:
10301 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
/* _chk variants: the flag argument must be a side-effect-free integer
   before it can be dropped and the call folded as the plain variant.  */
10303 case BUILT_IN_PRINTF_CHK:
10304 case BUILT_IN_VPRINTF_CHK:
10305 if (!validate_arg (arg0, INTEGER_TYPE)
10306 || TREE_SIDE_EFFECTS (arg0))
10309 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10312 case BUILT_IN_FPRINTF:
10313 case BUILT_IN_FPRINTF_UNLOCKED:
10314 case BUILT_IN_VFPRINTF:
10315 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10318 case BUILT_IN_FPRINTF_CHK:
10319 case BUILT_IN_VFPRINTF_CHK:
10320 if (!validate_arg (arg1, INTEGER_TYPE)
10321 || TREE_SIDE_EFFECTS (arg1))
10324 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10333 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10334 ARG2, and ARG3. IGNORE is true if the result of the function call is
10335 ignored. This function returns NULL_TREE if no simplification was
10339 fold_builtin_4 (location_t loc, tree fndecl,
10340 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10342 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* The _CHK memory builtins share one folder; the function code tells it
   which semantics (memcpy/mempcpy/memmove/memset) to apply.  */
10346 case BUILT_IN_MEMCPY_CHK:
10347 case BUILT_IN_MEMPCPY_CHK:
10348 case BUILT_IN_MEMMOVE_CHK:
10349 case BUILT_IN_MEMSET_CHK:
10350 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10352 DECL_FUNCTION_CODE (fndecl));
10354 case BUILT_IN_STRNCPY_CHK:
10355 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10357 case BUILT_IN_STRNCAT_CHK:
10358 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
/* Rewrite fprintf_chk/vfprintf_chk only when the flag argument (ARG1)
   is a side-effect-free integer.  */
10360 case BUILT_IN_FPRINTF_CHK:
10361 case BUILT_IN_VFPRINTF_CHK:
10362 if (!validate_arg (arg1, INTEGER_TYPE)
10363 || TREE_SIDE_EFFECTS (arg1))
10366 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10376 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10377 arguments, where NARGS <= 4. IGNORE is true if the result of the
10378 function call is ignored. This function returns NULL_TREE if no
10379 simplification was possible. Note that this only folds builtins with
10380 fixed argument patterns. Foldings that do varargs-to-varargs
10381 transformations, or that match calls with more than 4 arguments,
10382 need to be handled with fold_builtin_varargs instead. */
10384 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10387 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10389 tree ret = NULL_TREE;
/* Dispatch on NARGS to the fixed-arity folders.  */
10394 ret = fold_builtin_0 (loc, fndecl, ignore);
10397 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10400 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10403 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10406 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
/* Wrap any folded result in a NOP_EXPR so we can attach LOC and set
   TREE_NO_WARNING, suppressing follow-on diagnostics for the removed call.  */
10414 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10415 SET_EXPR_LOCATION (ret, loc);
10416 TREE_NO_WARNING (ret) = 1;
10422 /* Builtins with folding operations that operate on "..." arguments
10423 need special handling; we need to store the arguments in a convenient
10424 data structure before attempting any folding. Fortunately there are
10425 only a few builtins that fall into this category. FNDECL is the
10426 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10427 result of the function call is ignored. */
10430 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10431 bool ignore ATTRIBUTE_UNUSED)
10433 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10434 tree ret = NULL_TREE;
/* Each folder below receives the whole CALL_EXPR rather than unpacked
   arguments, since the argument count is not fixed.  */
10438 case BUILT_IN_SPRINTF_CHK:
10439 case BUILT_IN_VSPRINTF_CHK:
10440 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10443 case BUILT_IN_SNPRINTF_CHK:
10444 case BUILT_IN_VSNPRINTF_CHK:
10445 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10448 case BUILT_IN_FPCLASSIFY:
10449 ret = fold_builtin_fpclassify (loc, exp);
/* As in fold_builtin_n: wrap in a NOP_EXPR to carry LOC and the
   no-warning bit.  */
10457 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10458 SET_EXPR_LOCATION (ret, loc);
10459 TREE_NO_WARNING (ret) = 1;
10465 /* Return true if FNDECL shouldn't be folded right now.
10466 If a built-in function has an inline attribute always_inline
10467 wrapper, defer folding it after always_inline functions have
10468 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10469 might not be performed. */
10472 avoid_folding_inline_builtin (tree fndecl)
10474 return (DECL_DECLARED_INLINE_P (fndecl)
10475 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
/* Once the always-inline bodies have been inlined in this function,
   deferral is no longer needed and folding may proceed.  */
10477 && !cfun->always_inline_functions_inlined
10478 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10481 /* A wrapper function for builtin folding that prevents warnings for
10482 "statement without effect" and the like, caused by removing the
10483 call node earlier than the warning is generated. */
10486 fold_call_expr (location_t loc, tree exp, bool ignore)
10488 tree ret = NULL_TREE;
10489 tree fndecl = get_callee_fndecl (exp);
10491 && TREE_CODE (fndecl) == FUNCTION_DECL
10492 && DECL_BUILT_IN (fndecl)
10493 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10494 yet. Defer folding until we see all the arguments
10495 (after inlining). */
10496 && !CALL_EXPR_VA_ARG_PACK (exp))
10498 int nargs = call_expr_nargs (exp);
10500 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10501 instead last argument is __builtin_va_arg_pack (). Defer folding
10502 even in that case, until arguments are finalized. */
10503 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10505 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10507 && TREE_CODE (fndecl2) == FUNCTION_DECL
10508 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10509 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
/* Defer folding of always_inline fortify wrappers as well.  */
10513 if (avoid_folding_inline_builtin (fndecl))
10516 /* FIXME: Don't use a list in this interface. */
10517 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10518 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
/* Fixed-arity calls go through fold_builtin_n; anything longer is
   handled by the varargs folder.  */
10521 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10523 tree *args = CALL_EXPR_ARGP (exp);
10524 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10527 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10535 /* Conveniently construct a function call expression. FNDECL names the
10536 function to be called and ARGLIST is a TREE_LIST of arguments. */
10539 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10541 tree fntype = TREE_TYPE (fndecl);
10542 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10543 int n = list_length (arglist);
/* Flatten the TREE_LIST into a stack-allocated array, which is what
   fold_builtin_call_array expects.  */
10544 tree *argarray = (tree *) alloca (n * sizeof (tree));
10547 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10548 argarray[i] = TREE_VALUE (arglist);
10549 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10552 /* Conveniently construct a function call expression. FNDECL names the
10553 function to be called, N is the number of arguments, and the "..."
10554 parameters are the argument expressions. */
10557 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10560 tree fntype = TREE_TYPE (fndecl);
10561 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
/* Collect the N variadic tree arguments into a stack array before
   handing them to fold_builtin_call_array.  */
10562 tree *argarray = (tree *) alloca (n * sizeof (tree));
10566 for (i = 0; i < n; i++)
10567 argarray[i] = va_arg (ap, tree);
10569 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10572 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10573 N arguments are passed in the array ARGARRAY. */
10576 fold_builtin_call_array (location_t loc, tree type,
10581 tree ret = NULL_TREE;
/* Only direct calls to known builtins are candidates for folding;
   everything else just gets a CALL_EXPR built.  */
10585 if (TREE_CODE (fn) == ADDR_EXPR)
10587 tree fndecl = TREE_OPERAND (fn, 0);
10588 if (TREE_CODE (fndecl) == FUNCTION_DECL
10589 && DECL_BUILT_IN (fndecl))
10591 /* If last argument is __builtin_va_arg_pack (), arguments to this
10592 function are not finalized yet. Defer folding until they are. */
10593 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10595 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10597 && TREE_CODE (fndecl2) == FUNCTION_DECL
10598 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10599 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10600 return build_call_array_loc (loc, type, fn, n, argarray);
10602 if (avoid_folding_inline_builtin (fndecl))
10603 return build_call_array_loc (loc, type, fn, n, argarray);
/* Machine-dependent builtins go through the target hook, which still
   takes a TREE_LIST, so cons one up back-to-front.  */
10604 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10606 tree arglist = NULL_TREE;
10607 for (i = n - 1; i >= 0; i--)
10608 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10609 ret = targetm.fold_builtin (fndecl, arglist, false);
10612 return build_call_array_loc (loc, type, fn, n, argarray);
10614 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10616 /* First try the transformations that don't require consing up
10618 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10623 /* If we got this far, we need to build an exp. */
10624 exp = build_call_array_loc (loc, type, fn, n, argarray);
10625 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10626 return ret ? ret : exp;
10630 return build_call_array_loc (loc, type, fn, n, argarray);
10633 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10634 along with N new arguments specified as the "..." parameters. SKIP
10635 is the number of arguments in EXP to be omitted. This function is used
10636 to do varargs-to-varargs transformations. */
10639 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10641 int oldnargs = call_expr_nargs (exp);
10642 int nargs = oldnargs - skip + n;
10643 tree fntype = TREE_TYPE (fndecl);
10644 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
/* New arguments come first, followed by the old arguments past SKIP.  */
10652 buffer = XALLOCAVEC (tree, nargs);
10654 for (i = 0; i < n; i++)
10655 buffer[i] = va_arg (ap, tree);
10657 for (j = skip; j < oldnargs; j++, i++)
10658 buffer[i] = CALL_EXPR_ARG (exp, j);
/* With no new arguments we can reuse EXP's argument array in place.  */
10661 buffer = CALL_EXPR_ARGP (exp) + skip;
10663 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
10666 /* Validate a single argument ARG against a tree code CODE representing
/* Returns true if ARG's type is acceptable for CODE.  */
10670 validate_arg (const_tree arg, enum tree_code code)
/* POINTER_TYPE and INTEGER_TYPE are treated loosely: any pointer-ish
   type or any integral type matches, not just the exact tree code.  */
10674 else if (code == POINTER_TYPE)
10675 return POINTER_TYPE_P (TREE_TYPE (arg));
10676 else if (code == INTEGER_TYPE)
10677 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10678 return code == TREE_CODE (TREE_TYPE (arg));
10681 /* This function validates the types of a function call argument list
10682 against a specified list of tree_codes. If the last specifier is a 0,
10683 that represents an ellipsis, otherwise the last specifier must be a
10686 This is the GIMPLE version of validate_arglist. Eventually we want to
10687 completely convert builtins.c to work from GIMPLEs and the tree based
10688 validate_arglist will then be removed. */
10691 validate_gimple_arglist (const_gimple call, ...)
10693 enum tree_code code;
10699 va_start (ap, call);
/* Read as int: enum arguments are promoted through varargs.  */
10704 code = (enum tree_code) va_arg (ap, int);
10708 /* This signifies an ellipsis, any further arguments are all ok. */
10712 /* This signifies an endlink, if no arguments remain, return
10713 true, otherwise return false. */
10714 res = (i == gimple_call_num_args (call));
10717 /* If no parameters remain or the parameter's code does not
10718 match the specified code, return false. Otherwise continue
10719 checking any remaining arguments. */
10720 arg = gimple_call_arg (call, i++);
10721 if (!validate_arg (arg, code))
10728 /* We need gotos here since we can only have one VA_CLOSE in a
10736 /* This function validates the types of a function call argument list
10737 against a specified list of tree_codes. If the last specifier is a 0,
10738 that represents an ellipsis, otherwise the last specifier must be a
10742 validate_arglist (const_tree callexpr, ...)
10744 enum tree_code code;
10747 const_call_expr_arg_iterator iter;
10750 va_start (ap, callexpr);
10751 init_const_call_expr_arg_iterator (callexpr, &iter);
/* Read as int: enum arguments are promoted through varargs.  */
10755 code = (enum tree_code) va_arg (ap, int);
10759 /* This signifies an ellipsis, any further arguments are all ok. */
10763 /* This signifies an endlink, if no arguments remain, return
10764 true, otherwise return false. */
10765 res = !more_const_call_expr_args_p (&iter);
10768 /* If no parameters remain or the parameter's code does not
10769 match the specified code, return false. Otherwise continue
10770 checking any remaining arguments. */
10771 arg = next_const_call_expr_arg (&iter);
10772 if (!validate_arg (arg, code))
10779 /* We need gotos here since we can only have one VA_CLOSE in a
10787 /* Default target-specific builtin expander that does nothing. */
/* Every parameter is deliberately unused; targets that expand their own
   builtins replace this hook.  */
10790 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10791 rtx target ATTRIBUTE_UNUSED,
10792 rtx subtarget ATTRIBUTE_UNUSED,
10793 enum machine_mode mode ATTRIBUTE_UNUSED,
10794 int ignore ATTRIBUTE_UNUSED)
10799 /* Returns true if EXP represents data that would potentially reside
10800 in a readonly section. */
10803 readonly_data_expr (tree exp)
/* Only the address of an object can name read-only data.  */
10807 if (TREE_CODE (exp) != ADDR_EXPR)
10810 exp = get_base_address (TREE_OPERAND (exp, 0));
10814 /* Make sure we call decl_readonly_section only for trees it
10815 can handle (since it returns true for everything it doesn't
10817 if (TREE_CODE (exp) == STRING_CST
10818 || TREE_CODE (exp) == CONSTRUCTOR
10819 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10820 return decl_readonly_section (exp, 0);
10825 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10826 to the call, and TYPE is its return type.
10828 Return NULL_TREE if no simplification was possible, otherwise return the
10829 simplified form of the call as a tree.
10831 The simplified form may be a constant or other expression which
10832 computes the same value, but in a more efficient manner (including
10833 calls to other builtin functions).
10835 The call may contain arguments which need to be evaluated, but
10836 which are not useful to determine the result of the call. In
10837 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10838 COMPOUND_EXPR will be an argument which must be evaluated.
10839 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10840 COMPOUND_EXPR in the chain will contain the tree for the simplified
10841 form of the builtin function call. */
10844 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10846 if (!validate_arg (s1, POINTER_TYPE)
10847 || !validate_arg (s2, POINTER_TYPE))
10852 const char *p1, *p2;
10854 p2 = c_getstr (s2);
10858 p1 = c_getstr (s1);
/* Both strings are compile-time constants: do the search at host
   compile time with the host strstr.  */
10861 const char *r = strstr (p1, p2);
10865 return build_int_cst (TREE_TYPE (s1), 0);
10867 /* Return an offset into the constant string argument. */
10868 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10869 s1, size_int (r - p1));
10870 return fold_convert_loc (loc, type, tem);
10873 /* The argument is const char *, and the result is char *, so we need
10874 a type conversion here to avoid a warning. */
10876 return fold_convert_loc (loc, type, s1);
10881 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10885 /* New argument list transforming strstr(s1, s2) to
10886 strchr(s1, s2[0]). */
10887 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10891 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10892 the call, and TYPE is its return type.
10894 Return NULL_TREE if no simplification was possible, otherwise return the
10895 simplified form of the call as a tree.
10897 The simplified form may be a constant or other expression which
10898 computes the same value, but in a more efficient manner (including
10899 calls to other builtin functions).
10901 The call may contain arguments which need to be evaluated, but
10902 which are not useful to determine the result of the call. In
10903 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10904 COMPOUND_EXPR will be an argument which must be evaluated.
10905 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10906 COMPOUND_EXPR in the chain will contain the tree for the simplified
10907 form of the builtin function call. */
10910 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10912 if (!validate_arg (s1, POINTER_TYPE)
10913 || !validate_arg (s2, INTEGER_TYPE))
/* Only a constant character can be searched for at compile time.  */
10919 if (TREE_CODE (s2) != INTEGER_CST)
10922 p1 = c_getstr (s1);
/* target_char_cast fails if S2 does not fit in a target char.  */
10929 if (target_char_cast (s2, &c))
10932 r = strchr (p1, c);
10935 return build_int_cst (TREE_TYPE (s1), 0);
10937 /* Return an offset into the constant string argument. */
10938 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10939 s1, size_int (r - p1));
10940 return fold_convert_loc (loc, type, tem);
10946 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10947 the call, and TYPE is its return type.
10949 Return NULL_TREE if no simplification was possible, otherwise return the
10950 simplified form of the call as a tree.
10952 The simplified form may be a constant or other expression which
10953 computes the same value, but in a more efficient manner (including
10954 calls to other builtin functions).
10956 The call may contain arguments which need to be evaluated, but
10957 which are not useful to determine the result of the call. In
10958 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10959 COMPOUND_EXPR will be an argument which must be evaluated.
10960 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10961 COMPOUND_EXPR in the chain will contain the tree for the simplified
10962 form of the builtin function call. */
10965 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10967 if (!validate_arg (s1, POINTER_TYPE)
10968 || !validate_arg (s2, INTEGER_TYPE))
/* Only a constant character can be searched for at compile time.  */
10975 if (TREE_CODE (s2) != INTEGER_CST)
10978 p1 = c_getstr (s1);
10985 if (target_char_cast (s2, &c))
10988 r = strrchr (p1, c);
10991 return build_int_cst (TREE_TYPE (s1), 0);
10993 /* Return an offset into the constant string argument. */
10994 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10995 s1, size_int (r - p1));
10996 return fold_convert_loc (loc, type, tem);
/* Searching for '\0' finds the terminator, so strchr is equivalent
   and may be cheaper.  */
10999 if (! integer_zerop (s2))
11002 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11006 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11007 return build_call_expr_loc (loc, fn, 2, s1, s2);
11011 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11012 to the call, and TYPE is its return type.
11014 Return NULL_TREE if no simplification was possible, otherwise return the
11015 simplified form of the call as a tree.
11017 The simplified form may be a constant or other expression which
11018 computes the same value, but in a more efficient manner (including
11019 calls to other builtin functions).
11021 The call may contain arguments which need to be evaluated, but
11022 which are not useful to determine the result of the call. In
11023 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11024 COMPOUND_EXPR will be an argument which must be evaluated.
11025 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11026 COMPOUND_EXPR in the chain will contain the tree for the simplified
11027 form of the builtin function call. */
11030 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11032 if (!validate_arg (s1, POINTER_TYPE)
11033 || !validate_arg (s2, POINTER_TYPE))
11038 const char *p1, *p2;
11040 p2 = c_getstr (s2);
11044 p1 = c_getstr (s1);
/* Both strings constant: evaluate with the host strpbrk.  */
11047 const char *r = strpbrk (p1, p2);
11051 return build_int_cst (TREE_TYPE (s1), 0);
11053 /* Return an offset into the constant string argument. */
11054 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11055 s1, size_int (r - p1));
11056 return fold_convert_loc (loc, type, tem);
11060 /* strpbrk(x, "") == NULL.
11061 Evaluate and ignore s1 in case it had side-effects. */
11062 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11065 return NULL_TREE; /* Really call strpbrk. */
11067 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11071 /* New argument list transforming strpbrk(s1, s2) to
11072 strchr(s1, s2[0]). */
11073 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11077 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11080 Return NULL_TREE if no simplification was possible, otherwise return the
11081 simplified form of the call as a tree.
11083 The simplified form may be a constant or other expression which
11084 computes the same value, but in a more efficient manner (including
11085 calls to other builtin functions).
11087 The call may contain arguments which need to be evaluated, but
11088 which are not useful to determine the result of the call. In
11089 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11090 COMPOUND_EXPR will be an argument which must be evaluated.
11091 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11092 COMPOUND_EXPR in the chain will contain the tree for the simplified
11093 form of the builtin function call. */
11096 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11098 if (!validate_arg (dst, POINTER_TYPE)
11099 || !validate_arg (src, POINTER_TYPE))
11103 const char *p = c_getstr (src);
11105 /* If the string length is zero, return the dst parameter. */
11106 if (p && *p == '\0')
11109 if (optimize_insn_for_speed_p ())
11111 /* See if we can store by pieces into (dst + strlen(dst)). */
11113 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11114 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11116 if (!strlen_fn || !strcpy_fn)
11119 /* If we don't have a movstr we don't want to emit an strcpy
11120 call. We have to do that if the length of the source string
11121 isn't computable (in that case we can use memcpy probably
11122 later expanding to a sequence of mov instructions). If we
11123 have movstr instructions we can emit strcpy calls. */
/* The source length must be a side-effect-free constant for the
   rewrite to be safe.  */
11126 tree len = c_strlen (src, 1);
11127 if (! len || TREE_SIDE_EFFECTS (len))
11131 /* Stabilize the argument list. */
11132 dst = builtin_save_expr (dst);
11134 /* Create strlen (dst). */
11135 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11136 /* Create (dst p+ strlen (dst)). */
11138 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11139 TREE_TYPE (dst), dst, newdst);
11140 newdst = builtin_save_expr (newdst);
/* Emit strcpy (dst + strlen (dst), src) and yield DST as the value,
   matching strcat's return convention.  */
11142 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11143 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11149 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11150 arguments to the call.
11152 Return NULL_TREE if no simplification was possible, otherwise return the
11153 simplified form of the call as a tree.
11155 The simplified form may be a constant or other expression which
11156 computes the same value, but in a more efficient manner (including
11157 calls to other builtin functions).
11159 The call may contain arguments which need to be evaluated, but
11160 which are not useful to determine the result of the call. In
11161 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11162 COMPOUND_EXPR will be an argument which must be evaluated.
11163 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11164 COMPOUND_EXPR in the chain will contain the tree for the simplified
11165 form of the builtin function call. */
11168 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11170 if (!validate_arg (dst, POINTER_TYPE)
11171 || !validate_arg (src, POINTER_TYPE)
11172 || !validate_arg (len, INTEGER_TYPE))
11176 const char *p = c_getstr (src);
11178 /* If the requested length is zero, or the src parameter string
11179 length is zero, return the dst parameter. */
11180 if (integer_zerop (len) || (p && *p == '\0'))
11181 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11183 /* If the requested len is greater than or equal to the string
11184 length, call strcat. */
11185 if (TREE_CODE (len) == INTEGER_CST && p
11186 && compare_tree_int (len, strlen (p)) >= 0)
11188 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11190 /* If the replacement _DECL isn't initialized, don't do the
11195 return build_call_expr_loc (loc, fn, 2, dst, src);
11201 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11204 Return NULL_TREE if no simplification was possible, otherwise return the
11205 simplified form of the call as a tree.
11207 The simplified form may be a constant or other expression which
11208 computes the same value, but in a more efficient manner (including
11209 calls to other builtin functions).
11211 The call may contain arguments which need to be evaluated, but
11212 which are not useful to determine the result of the call. In
11213 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11214 COMPOUND_EXPR will be an argument which must be evaluated.
11215 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11216 COMPOUND_EXPR in the chain will contain the tree for the simplified
11217 form of the builtin function call. */
11220 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11222 if (!validate_arg (s1, POINTER_TYPE)
11223 || !validate_arg (s2, POINTER_TYPE))
11227 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11229 /* If both arguments are constants, evaluate at compile-time. */
11232 const size_t r = strspn (p1, p2);
11233 return size_int (r);
11236 /* If either argument is "", return NULL_TREE. */
11237 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11238 /* Evaluate and ignore both arguments in case either one has
11240 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11246 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11249 Return NULL_TREE if no simplification was possible, otherwise return the
11250 simplified form of the call as a tree.
11252 The simplified form may be a constant or other expression which
11253 computes the same value, but in a more efficient manner (including
11254 calls to other builtin functions).
11256 The call may contain arguments which need to be evaluated, but
11257 which are not useful to determine the result of the call. In
11258 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11259 COMPOUND_EXPR will be an argument which must be evaluated.
11260 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11261 COMPOUND_EXPR in the chain will contain the tree for the simplified
11262 form of the builtin function call. */
11265 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11267 if (!validate_arg (s1, POINTER_TYPE)
11268 || !validate_arg (s2, POINTER_TYPE))
11272 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11274 /* If both arguments are constants, evaluate at compile-time. */
11277 const size_t r = strcspn (p1, p2);
11278 return size_int (r);
11281 /* If the first argument is "", return NULL_TREE. */
11282 if (p1 && *p1 == '\0')
11284 /* Evaluate and ignore argument s2 in case it has
11286 return omit_one_operand_loc (loc, size_type_node,
11287 size_zero_node, s2);
11290 /* If the second argument is "", return __builtin_strlen(s1). */
11291 if (p2 && *p2 == '\0')
11293 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11295 /* If the replacement _DECL isn't initialized, don't do the
11300 return build_call_expr_loc (loc, fn, 1, s1);
11306 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11307 to the call. IGNORE is true if the value returned
11308 by the builtin will be ignored. UNLOCKED is true if this is
11309 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11310 the known length of the string. Return NULL_TREE if no simplification
11314 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11315 bool ignore, bool unlocked, tree len)
11317 /* If we're using an unlocked function, assume the other unlocked
11318 functions exist explicitly. */
11319 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11320 : implicit_built_in_decls[BUILT_IN_FPUTC];
11321 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11322 : implicit_built_in_decls[BUILT_IN_FWRITE];
11324 /* If the return value is used, don't do the transformation. */
11328 /* Verify the arguments in the original call. */
11329 if (!validate_arg (arg0, POINTER_TYPE)
11330 || !validate_arg (arg1, POINTER_TYPE))
11334 len = c_strlen (arg0, 0);
11336 /* Get the length of the string passed to fputs. If the length
11337 can't be determined, punt. */
11339 || TREE_CODE (len) != INTEGER_CST)
11342 switch (compare_tree_int (len, 1))
11344 case -1: /* length is 0, delete the call entirely.  */
11345 return omit_one_operand_loc (loc, integer_type_node,
11346 integer_zero_node, arg1);
11348 case 0: /* length is 1, call fputc. */
11350 const char *p = c_getstr (arg0);
11355 return build_call_expr_loc (loc, fn_fputc, 2,
11356 build_int_cst (NULL_TREE, p[0]), arg1);
11362 case 1: /* length is greater than 1, call fwrite. */
11364 /* If optimizing for size keep fputs. */
11365 if (optimize_function_for_size_p (cfun))
11367 /* New argument list transforming fputs(string, stream) to
11368 fwrite(string, 1, len, stream). */
11370 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11371 size_one_node, len, arg1);
11376 gcc_unreachable ();
11381 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11382 produced. False otherwise. This is done so that we don't output the error
11383 or warning twice or three times. */
11386 fold_builtin_next_arg (tree exp, bool va_start_p)
11388 tree fntype = TREE_TYPE (current_function_decl);
11389 int nargs = call_expr_nargs (exp);
/* va_start is only meaningful in a varargs function: the parameter list
   must not end in void.  */
11392 if (TYPE_ARG_TYPES (fntype) == 0
11393 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11394 == void_type_node))
11396 error ("%<va_start%> used in function with fixed args");
11402 if (va_start_p && (nargs != 2))
11404 error ("wrong number of arguments to function %<va_start%>");
11407 arg = CALL_EXPR_ARG (exp, 1);
11409 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11410 when we checked the arguments and if needed issued a warning. */
11415 /* Evidently an out of date version of <stdarg.h>; can't validate
11416 va_start's second argument, but can still work as intended. */
11417 warning (0, "%<__builtin_next_arg%> called without an argument");
11420 else if (nargs > 1)
11422 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11425 arg = CALL_EXPR_ARG (exp, 0);
11428 if (TREE_CODE (arg) == SSA_NAME)
11429 arg = SSA_NAME_VAR (arg);
11431 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11432 or __builtin_next_arg (0) the first time we see it, after checking
11433 the arguments and if needed issuing a warning. */
11434 if (!integer_zerop (arg))
11436 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11438 /* Strip off all nops for the sake of the comparison. This
11439 is not quite the same as STRIP_NOPS. It does more.
11440 We must also strip off INDIRECT_EXPR for C++ reference
11442 while (CONVERT_EXPR_P (arg)
11443 || TREE_CODE (arg) == INDIRECT_REF)
11444 arg = TREE_OPERAND (arg, 0);
11445 if (arg != last_parm)
11447 /* FIXME: Sometimes with the tree optimizers we can get
11448 something other than the last argument even though the user
11449 used the last argument. We just warn and set the arg to be
11450 the last argument so that we will get wrong-code because of
11452 warning (0, "second parameter of %<va_start%> not last named argument");
11455 /* Undefined by C99 7.15.1.4p4 (va_start):
11456 "If the parameter parmN is declared with the register storage
11457 class, with a function or array type, or with a type that is
11458 not compatible with the type that results after application of
11459 the default argument promotions, the behavior is undefined."
11461 else if (DECL_REGISTER (arg))
11462 warning (0, "undefined behaviour when second parameter of "
11463 "%<va_start%> is declared with %<register%> storage")
11465 /* We want to verify the second parameter just once before the tree
11466 optimizers are run and then avoid keeping it in the tree,
11467 as otherwise we could warn even for correct code like:
11468 void foo (int i, ...)
11469 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11471 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11473 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11479 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11480 ORIG may be null if this is a 2-argument call. We don't attempt to
11481 simplify calls with more than 3 arguments.
11483 Return NULL_TREE if no simplification was possible, otherwise return the
11484 simplified form of the call as a tree. If IGNORED is true, it means that
11485 the caller does not use the returned value of the function. */
/* Fold sprintf (DEST, FMT[, ORIG]) into strcpy when FMT is a literal
   with no '%' directives, or "%s" with a known-length ORIG.  Returns
   the folded tree, or NULL_TREE if no simplification applies.
   NOTE(review): this listing is an elided extract -- intervening
   lines (braces, early returns) are missing from view.  */
11488 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11489 tree orig, int ignored)
11492 const char *fmt_str = NULL;
11494 /* Verify the required arguments in the original call. We deal with two
11495 types of sprintf() calls: 'sprintf (str, fmt)' and
11496 'sprintf (dest, "%s", orig)'. */
11497 if (!validate_arg (dest, POINTER_TYPE)
11498 || !validate_arg (fmt, POINTER_TYPE))
11500 if (orig && !validate_arg (orig, POINTER_TYPE))
11503 /* Check whether the format is a literal string constant. */
11504 fmt_str = c_getstr (fmt);
11505 if (fmt_str == NULL)
11509 retval = NULL_TREE;
11511 if (!init_target_chars ())
11514 /* If the format doesn't contain % args or %%, use strcpy. */
11515 if (strchr (fmt_str, target_percent) == NULL)
11517 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11522 /* Don't optimize sprintf (buf, "abc", ptr++). */
11526 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11527 'format' is known to contain no % formats. */
11528 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
      /* sprintf returns the number of characters written, i.e. the
	 format string's length when it has no directives.  */
11530 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11533 /* If the format is "%s", use strcpy if the result isn't used. */
11534 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11537 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11542 /* Don't crash on sprintf (str1, "%s"). */
11546 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
      /* Only a constant strlen (ORIG) lets us supply the return value.  */
11549 retval = c_strlen (orig, 1);
11550 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11553 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11556 if (call && retval)
      /* Glue the strcpy call and the computed length together so the
	 combined expression both copies and yields sprintf's result.  */
11558 retval = fold_convert_loc
11559 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11561 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11567 /* Expand a call EXP to __builtin_object_size. */
/* Expand a __builtin_object_size (PTR, OST) call that survived to RTL
   expansion: diagnose malformed arguments (emitting a trap), otherwise
   return the documented failure value: (size_t)-1 for OST 0/1 and 0
   for OST 2/3.  NOTE(review): elided extract; some lines missing.  */
11570 expand_builtin_object_size (tree exp)
11573 int object_size_type;
11574 tree fndecl = get_callee_fndecl (exp);
11576 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11578 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11580 expand_builtin_trap ();
11584 ost = CALL_EXPR_ARG (exp, 1);
      /* The second argument must be a constant in [0, 3].  */
11587 if (TREE_CODE (ost) != INTEGER_CST
11588 || tree_int_cst_sgn (ost) < 0
11589 || compare_tree_int (ost, 3) > 0)
11591 error ("%Klast argument of %D is not integer constant between 0 and 3",
11593 expand_builtin_trap ();
11597 object_size_type = tree_low_cst (ost, 0)
11599 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11602 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11603 FCODE is the BUILT_IN_* to use.
11604 Return NULL_RTX if we failed; the caller should emit a normal call,
11605 otherwise try to get the result in TARGET, if convenient (and in
11606 mode MODE if that's convenient). */
/* Expand a __mem{cpy,pcpy,move,set}_chk call EXP into the unchecked
   variant when the length is known to fit SIZE, warn on guaranteed
   overflow, and handle the __mempcpy_chk / __memmove_chk special
   cases.  Returns NULL_RTX to fall back to a library call.
   NOTE(review): elided extract; some lines missing from view.  */
11609 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11610 enum built_in_function fcode)
11612 tree dest, src, len, size;
11614 if (!validate_arglist (exp,
11616 fcode == BUILT_IN_MEMSET_CHK
11617 ? INTEGER_TYPE : POINTER_TYPE,
11618 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11621 dest = CALL_EXPR_ARG (exp, 0);
11622 src = CALL_EXPR_ARG (exp, 1);
11623 len = CALL_EXPR_ARG (exp, 2);
11624 size = CALL_EXPR_ARG (exp, 3);
      /* Without a constant object size there is nothing to check.  */
11626 if (! host_integerp (size, 1))
11629 if (host_integerp (len, 1) || integer_all_onesp (size))
      /* A constant LEN larger than SIZE always overflows -- warn.  */
11633 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11635 warning_at (tree_nonartificial_location (exp),
11636 0, "%Kcall to %D will always overflow destination buffer",
11637 exp, get_callee_fndecl (exp));
11642 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11643 mem{cpy,pcpy,move,set} is available. */
11646 case BUILT_IN_MEMCPY_CHK:
11647 fn = built_in_decls[BUILT_IN_MEMCPY];
11649 case BUILT_IN_MEMPCPY_CHK:
11650 fn = built_in_decls[BUILT_IN_MEMPCPY];
11652 case BUILT_IN_MEMMOVE_CHK:
11653 fn = built_in_decls[BUILT_IN_MEMMOVE];
11655 case BUILT_IN_MEMSET_CHK:
11656 fn = built_in_decls[BUILT_IN_MEMSET];
      /* Rebuild the call against the unchecked builtin, preserving
	 the tail-call flag of the original expression.  */
11665 fn = build_call_nofold (fn, 3, dest, src, len);
11666 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11667 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11668 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11670 else if (fcode == BUILT_IN_MEMSET_CHK)
11674 unsigned int dest_align
11675 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11677 /* If DEST is not a pointer type, call the normal function. */
11678 if (dest_align == 0)
11681 /* If SRC and DEST are the same (and not volatile), do nothing. */
11682 if (operand_equal_p (src, dest, 0))
11686 if (fcode != BUILT_IN_MEMPCPY_CHK)
11688 /* Evaluate and ignore LEN in case it has side-effects. */
11689 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11690 return expand_expr (dest, target, mode, EXPAND_NORMAL);
      /* __mempcpy_chk (p, p, len, sz) yields p + len.  */
11693 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11694 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11697 /* __memmove_chk special case. */
11698 if (fcode == BUILT_IN_MEMMOVE_CHK)
11700 unsigned int src_align
11701 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11703 if (src_align == 0)
11706 /* If src is categorized for a readonly section we can use
11707 normal __memcpy_chk. */
11708 if (readonly_data_expr (src))
11710 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11713 fn = build_call_nofold (fn, 4, dest, src, len, size);
11714 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11715 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11716 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11723 /* Emit warning if a buffer overflow is detected at compile time. */
/* Warn at compile time when a __str*_chk / __v?snprintf_chk call EXP
   (identified by FCODE) is statically known to overflow -- or, for
   __strncat_chk, might overflow -- its destination buffer.
   NOTE(review): elided extract; some lines missing from view.  */
11726 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11730 location_t loc = tree_nonartificial_location (exp);
      /* Pick out the length/source and object-size arguments, whose
	 positions differ per builtin.  */
11734 case BUILT_IN_STRCPY_CHK:
11735 case BUILT_IN_STPCPY_CHK:
11736 /* For __strcat_chk the warning will be emitted only if overflowing
11737 by at least strlen (dest) + 1 bytes. */
11738 case BUILT_IN_STRCAT_CHK:
11739 len = CALL_EXPR_ARG (exp, 1);
11740 size = CALL_EXPR_ARG (exp, 2);
11743 case BUILT_IN_STRNCAT_CHK:
11744 case BUILT_IN_STRNCPY_CHK:
11745 len = CALL_EXPR_ARG (exp, 2);
11746 size = CALL_EXPR_ARG (exp, 3);
11748 case BUILT_IN_SNPRINTF_CHK:
11749 case BUILT_IN_VSNPRINTF_CHK:
11750 len = CALL_EXPR_ARG (exp, 1);
11751 size = CALL_EXPR_ARG (exp, 3);
11754 gcc_unreachable ();
      /* SIZE of (size_t)-1 means "unknown object size": no warning.  */
11760 if (! host_integerp (size, 1) || integer_all_onesp (size))
11765 len = c_strlen (len, 1);
11766 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11769 else if (fcode == BUILT_IN_STRNCAT_CHK)
11771 tree src = CALL_EXPR_ARG (exp, 1);
11772 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11774 src = c_strlen (src, 1);
11775 if (! src || ! host_integerp (src, 1))
      /* Source length unknown: overflow is possible but not certain.  */
11777 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11778 exp, get_callee_fndecl (exp));
11781 else if (tree_int_cst_lt (src, size))
11784 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11787 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11788 exp, get_callee_fndecl (exp));
11791 /* Emit warning if a buffer overflow is detected at compile time
11792 in __sprintf_chk/__vsprintf_chk calls. */
/* Warn at compile time when a __sprintf_chk / __vsprintf_chk call EXP
   would certainly overflow the destination: only handles a literal
   format with no directives, or "%s" with a string-literal argument.
   NOTE(review): elided extract; some lines missing from view.  */
11795 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11797 tree size, len, fmt;
11798 const char *fmt_str;
11799 int nargs = call_expr_nargs (exp);
11801 /* Verify the required arguments in the original call. */
11805 size = CALL_EXPR_ARG (exp, 2);
11806 fmt = CALL_EXPR_ARG (exp, 3);
      /* (size_t)-1 means the object size is unknown: nothing to check.  */
11808 if (! host_integerp (size, 1) || integer_all_onesp (size))
11811 /* Check whether the format is a literal string constant. */
11812 fmt_str = c_getstr (fmt);
11813 if (fmt_str == NULL)
11816 if (!init_target_chars ())
11819 /* If the format doesn't contain % args or %%, we know its size. */
11820 if (strchr (fmt_str, target_percent) == 0)
11821 len = build_int_cstu (size_type_node, strlen (fmt_str));
11822 /* If the format is "%s" and first ... argument is a string literal,
11824 else if (fcode == BUILT_IN_SPRINTF_CHK
11825 && strcmp (fmt_str, target_percent_s) == 0)
11831 arg = CALL_EXPR_ARG (exp, 4);
11832 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11835 len = c_strlen (arg, 1);
11836 if (!len || ! host_integerp (len, 1))
      /* The output needs LEN + 1 bytes (NUL), so LEN >= SIZE overflows.  */
11842 if (! tree_int_cst_lt (len, size))
11843 warning_at (tree_nonartificial_location (exp),
11844 0, "%Kcall to %D will always overflow destination buffer",
11845 exp, get_callee_fndecl (exp));
11848 /* Emit warning if a free is called with address of a variable. */
/* Warn when a free () call EXP is passed the address of a non-heap
   object (a declared variable).  Naming the object in the diagnostic
   only when it is an SSA variable.  */
11851 maybe_emit_free_warning (tree exp)
11853 tree arg = CALL_EXPR_ARG (exp, 0);
      /* Only &object arguments can be diagnosed statically.  */
11856 if (TREE_CODE (arg) != ADDR_EXPR)
11859 arg = get_base_address (TREE_OPERAND (arg, 0));
      /* A base that is a dereference may still point into the heap.  */
11860 if (arg == NULL || INDIRECT_REF_P (arg))
11863 if (SSA_VAR_P (arg))
11864 warning_at (tree_nonartificial_location (exp),
11865 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11867 warning_at (tree_nonartificial_location (exp),
11868 0, "%Kattempt to free a non-heap object", exp);
11871 /* Fold a call to __builtin_object_size with arguments PTR and OST,
/* Fold __builtin_object_size (PTR, OST) to a size_t constant when the
   object size can be computed; SSA_NAME pointers with unknown size are
   left unfolded so later passes can retry.
   NOTE(review): elided extract; some lines missing from view.  */
11875 fold_builtin_object_size (tree ptr, tree ost)
11877 tree ret = NULL_TREE;
11878 int object_size_type;
11880 if (!validate_arg (ptr, POINTER_TYPE)
11881 || !validate_arg (ost, INTEGER_TYPE))
      /* OST must be a constant in [0, 3].  */
11886 if (TREE_CODE (ost) != INTEGER_CST
11887 || tree_int_cst_sgn (ost) < 0
11888 || compare_tree_int (ost, 3) > 0)
11891 object_size_type = tree_low_cst (ost, 0);
11893 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11894 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11895 and (size_t) 0 for types 2 and 3. */
11896 if (TREE_SIDE_EFFECTS (ptr))
11897 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11899 if (TREE_CODE (ptr) == ADDR_EXPR)
11900 ret = build_int_cstu (size_type_node,
11901 compute_builtin_object_size (ptr, object_size_type));
11903 else if (TREE_CODE (ptr) == SSA_NAME)
11905 unsigned HOST_WIDE_INT bytes;
11907 /* If object size is not known yet, delay folding until
11908 later. Maybe subsequent passes will help determining
11910 bytes = compute_builtin_object_size (ptr, object_size_type);
11911 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11913 ret = build_int_cstu (size_type_node, bytes);
      /* Make sure the computed constant fits size_t.  */
11918 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11919 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11920 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11927 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11928 DEST, SRC, LEN, and SIZE are the arguments to the call.
11929 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11930 code of the builtin. If MAXLEN is not NULL, it is maximum length
11931 passed as third argument. */
/* Fold a __mem{cpy,pcpy,move,set}_chk call (see block comment above)
   into the unchecked builtin when LEN (or MAXLEN) is provably within
   SIZE, or simplify the SRC == DEST cases.
   NOTE(review): elided extract; some lines missing from view.  */
11934 fold_builtin_memory_chk (location_t loc, tree fndecl,
11935 tree dest, tree src, tree len, tree size,
11936 tree maxlen, bool ignore,
11937 enum built_in_function fcode)
11941 if (!validate_arg (dest, POINTER_TYPE)
11942 || !validate_arg (src,
11943 (fcode == BUILT_IN_MEMSET_CHK
11944 ? INTEGER_TYPE : POINTER_TYPE))
11945 || !validate_arg (len, INTEGER_TYPE)
11946 || !validate_arg (size, INTEGER_TYPE))
11949 /* If SRC and DEST are the same (and not volatile), return DEST
11950 (resp. DEST+LEN for __mempcpy_chk). */
11951 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11953 if (fcode != BUILT_IN_MEMPCPY_CHK)
11954 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
11958 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
11960 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
      /* Without a constant SIZE no static check is possible.  */
11964 if (! host_integerp (size, 1))
11967 if (! integer_all_onesp (size))
11969 if (! host_integerp (len, 1))
11971 /* If LEN is not constant, try MAXLEN too.
11972 For MAXLEN only allow optimizing into non-_ocs function
11973 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11974 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11976 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11978 /* (void) __mempcpy_chk () can be optimized into
11979 (void) __memcpy_chk (). */
11980 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11984 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
11992 if (tree_int_cst_lt (size, maxlen))
11997 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11998 mem{cpy,pcpy,move,set} is available. */
12001 case BUILT_IN_MEMCPY_CHK:
12002 fn = built_in_decls[BUILT_IN_MEMCPY];
12004 case BUILT_IN_MEMPCPY_CHK:
12005 fn = built_in_decls[BUILT_IN_MEMPCPY];
12007 case BUILT_IN_MEMMOVE_CHK:
12008 fn = built_in_decls[BUILT_IN_MEMMOVE];
12010 case BUILT_IN_MEMSET_CHK:
12011 fn = built_in_decls[BUILT_IN_MEMSET];
12020 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12023 /* Fold a call to the __st[rp]cpy_chk builtin.
12024 DEST, SRC, and SIZE are the arguments to the call.
12025 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12026 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12027 strings passed as second argument. */
/* Fold a __st[rp]cpy_chk call (see block comment above) into
   st[rp]cpy, __memcpy_chk, or __strcpy_chk as the known lengths
   allow.  NOTE(review): elided extract; some lines missing.  */
12030 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12031 tree src, tree size,
12032 tree maxlen, bool ignore,
12033 enum built_in_function fcode)
12037 if (!validate_arg (dest, POINTER_TYPE)
12038 || !validate_arg (src, POINTER_TYPE)
12039 || !validate_arg (size, INTEGER_TYPE))
12042 /* If SRC and DEST are the same (and not volatile), return DEST. */
12043 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12044 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12046 if (! host_integerp (size, 1))
12049 if (! integer_all_onesp (size))
12051 len = c_strlen (src, 1);
12052 if (! len || ! host_integerp (len, 1))
12054 /* If LEN is not constant, try MAXLEN too.
12055 For MAXLEN only allow optimizing into non-_ocs function
12056 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12057 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12059 if (fcode == BUILT_IN_STPCPY_CHK)
12064 /* If return value of __stpcpy_chk is ignored,
12065 optimize into __strcpy_chk. */
12066 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12070 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12073 if (! len || TREE_SIDE_EFFECTS (len))
12076 /* If c_strlen returned something, but not a constant,
12077 transform __strcpy_chk into __memcpy_chk. */
12078 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
      /* Copy strlen (SRC) + 1 bytes to include the terminating NUL.  */
12082 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12083 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12084 build_call_expr_loc (loc, fn, 4,
12085 dest, src, len, size));
12091 if (! tree_int_cst_lt (maxlen, size))
12095 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12096 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12097 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12101 return build_call_expr_loc (loc, fn, 2, dest, src);
12104 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12105 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12106 length passed as third argument. */
/* Fold __strncpy_chk (DEST, SRC, LEN, SIZE) into strncpy when LEN (or
   the externally-computed MAXLEN bound) is provably within SIZE.
   NOTE(review): elided extract; some lines missing from view.  */
12109 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12110 tree len, tree size, tree maxlen)
12114 if (!validate_arg (dest, POINTER_TYPE)
12115 || !validate_arg (src, POINTER_TYPE)
12116 || !validate_arg (len, INTEGER_TYPE)
12117 || !validate_arg (size, INTEGER_TYPE))
12120 if (! host_integerp (size, 1))
12123 if (! integer_all_onesp (size))
12125 if (! host_integerp (len, 1))
12127 /* If LEN is not constant, try MAXLEN too.
12128 For MAXLEN only allow optimizing into non-_ocs function
12129 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12130 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12136 if (tree_int_cst_lt (size, maxlen))
12140 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12141 fn = built_in_decls[BUILT_IN_STRNCPY];
12145 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12148 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12149 are the arguments to the call. */
/* Fold __strcat_chk (DEST, SRC, SIZE): drop the call for SRC == "",
   or fall through to plain strcat when SIZE is (size_t)-1 (unknown).
   NOTE(review): elided extract; some lines missing from view.  */
12152 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12153 tree src, tree size)
12158 if (!validate_arg (dest, POINTER_TYPE)
12159 || !validate_arg (src, POINTER_TYPE)
12160 || !validate_arg (size, INTEGER_TYPE))
12163 p = c_getstr (src);
12164 /* If the SRC parameter is "", return DEST. */
12165 if (p && *p == '\0')
      /* omit_one_operand still evaluates SRC for side effects.  */
12166 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12168 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12171 /* If __builtin_strcat_chk is used, assume strcat is available. */
12172 fn = built_in_decls[BUILT_IN_STRCAT];
12176 return build_call_expr_loc (loc, fn, 2, dest, src);
12179 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12183 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12184 tree dest, tree src, tree len, tree size)
12189 if (!validate_arg (dest, POINTER_TYPE)
12190 || !validate_arg (src, POINTER_TYPE)
12191 || !validate_arg (size, INTEGER_TYPE)
12192 || !validate_arg (size, INTEGER_TYPE))
12195 p = c_getstr (src);
12196 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12197 if (p && *p == '\0')
12198 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12199 else if (integer_zerop (len))
12200 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12202 if (! host_integerp (size, 1))
12205 if (! integer_all_onesp (size))
12207 tree src_len = c_strlen (src, 1);
12209 && host_integerp (src_len, 1)
12210 && host_integerp (len, 1)
12211 && ! tree_int_cst_lt (len, src_len))
12213 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12214 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12218 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12223 /* If __builtin_strncat_chk is used, assume strncat is available. */
12224 fn = built_in_decls[BUILT_IN_STRNCAT];
12228 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12231 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12232 a normal call should be emitted rather than expanding the function
12233 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
/* Fold __{,v}sprintf_chk call EXP into {,v}sprintf when the checking
   flag is zero (or the format is directive-free / "%s") and no
   overflow is statically detected.
   NOTE(review): elided extract; some lines missing from view.  */
12236 fold_builtin_sprintf_chk (location_t loc, tree exp,
12237 enum built_in_function fcode)
12239 tree dest, size, len, fn, fmt, flag;
12240 const char *fmt_str;
12241 int nargs = call_expr_nargs (exp);
12243 /* Verify the required arguments in the original call. */
12246 dest = CALL_EXPR_ARG (exp, 0);
12247 if (!validate_arg (dest, POINTER_TYPE))
12249 flag = CALL_EXPR_ARG (exp, 1);
12250 if (!validate_arg (flag, INTEGER_TYPE))
12252 size = CALL_EXPR_ARG (exp, 2);
12253 if (!validate_arg (size, INTEGER_TYPE))
12255 fmt = CALL_EXPR_ARG (exp, 3);
12256 if (!validate_arg (fmt, POINTER_TYPE))
12259 if (! host_integerp (size, 1))
12264 if (!init_target_chars ())
12267 /* Check whether the format is a literal string constant. */
12268 fmt_str = c_getstr (fmt);
12269 if (fmt_str != NULL)
12271 /* If the format doesn't contain % args or %%, we know the size. */
12272 if (strchr (fmt_str, target_percent) == 0)
12274 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12275 len = build_int_cstu (size_type_node, strlen (fmt_str));
12277 /* If the format is "%s" and first ... argument is a string literal,
12278 we know the size too. */
12279 else if (fcode == BUILT_IN_SPRINTF_CHK
12280 && strcmp (fmt_str, target_percent_s) == 0)
12286 arg = CALL_EXPR_ARG (exp, 4);
12287 if (validate_arg (arg, POINTER_TYPE))
12289 len = c_strlen (arg, 1);
12290 if (! len || ! host_integerp (len, 1))
      /* Output needs LEN + 1 bytes, so LEN must be strictly < SIZE.  */
12297 if (! integer_all_onesp (size))
12299 if (! len || ! tree_int_cst_lt (len, size))
12303 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12304 or if format doesn't contain % chars or is "%s". */
12305 if (! integer_zerop (flag))
12307 if (fmt_str == NULL)
12309 if (strchr (fmt_str, target_percent) != NULL
12310 && strcmp (fmt_str, target_percent_s))
12314 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12315 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12316 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF]
      /* Rebuild the call dropping the flag and size arguments.  */
12320 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12323 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12324 a normal call should be emitted rather than expanding the function
12325 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12326 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12327 passed as second argument. */
/* Fold __{,v}snprintf_chk call EXP into {,v}snprintf when LEN (or
   MAXLEN) provably fits SIZE and the flag/format permit it (see block
   comment above).  NOTE(review): elided extract; some lines missing.  */
12330 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12331 enum built_in_function fcode)
12333 tree dest, size, len, fn, fmt, flag;
12334 const char *fmt_str;
12336 /* Verify the required arguments in the original call. */
12337 if (call_expr_nargs (exp) < 5)
12339 dest = CALL_EXPR_ARG (exp, 0);
12340 if (!validate_arg (dest, POINTER_TYPE))
12342 len = CALL_EXPR_ARG (exp, 1);
12343 if (!validate_arg (len, INTEGER_TYPE))
12345 flag = CALL_EXPR_ARG (exp, 2);
12346 if (!validate_arg (flag, INTEGER_TYPE))
12348 size = CALL_EXPR_ARG (exp, 3);
12349 if (!validate_arg (size, INTEGER_TYPE))
12351 fmt = CALL_EXPR_ARG (exp, 4);
12352 if (!validate_arg (fmt, POINTER_TYPE))
12355 if (! host_integerp (size, 1))
12358 if (! integer_all_onesp (size))
12360 if (! host_integerp (len, 1))
12362 /* If LEN is not constant, try MAXLEN too.
12363 For MAXLEN only allow optimizing into non-_ocs function
12364 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12365 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12371 if (tree_int_cst_lt (size, maxlen))
12375 if (!init_target_chars ())
12378 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12379 or if format doesn't contain % chars or is "%s". */
12380 if (! integer_zerop (flag))
12382 fmt_str = c_getstr (fmt);
12383 if (fmt_str == NULL)
12385 if (strchr (fmt_str, target_percent) != NULL
12386 && strcmp (fmt_str, target_percent_s))
12390 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12392 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12393 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
      /* Rebuild the call dropping the flag and size arguments.  */
12397 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12400 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12401 FMT and ARG are the arguments to the call; we don't fold cases with
12402 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12404 Return NULL_TREE if no simplification was possible, otherwise return the
12405 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12406 code of the function to be simplified. */
/* Fold printf-family calls (see block comment above) into putchar /
   puts when the literal format permits; only applies when the return
   value is ignored.  NOTE(review): elided extract; some lines
   (braces, early returns) are missing from view.  */
12409 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12410 tree arg, bool ignore,
12411 enum built_in_function fcode)
12413 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12414 const char *fmt_str = NULL;
12416 /* If the return value is used, don't do the transformation. */
12420 /* Verify the required arguments in the original call. */
12421 if (!validate_arg (fmt, POINTER_TYPE))
12424 /* Check whether the format is a literal string constant. */
12425 fmt_str = c_getstr (fmt);
12426 if (fmt_str == NULL)
12429 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12431 /* If we're using an unlocked function, assume the other
12432 unlocked functions exist explicitly. */
12433 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12434 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12438 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12439 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12442 if (!init_target_chars ())
12445 if (strcmp (fmt_str, target_percent_s) == 0
12446 || strchr (fmt_str, target_percent) == NULL)
12450 if (strcmp (fmt_str, target_percent_s) == 0)
12452 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12455 if (!arg || !validate_arg (arg, POINTER_TYPE))
12458 str = c_getstr (arg);
12464 /* The format specifier doesn't contain any '%' characters. */
12465 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12471 /* If the string was "", printf does nothing. */
12472 if (str[0] == '\0')
12473 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12475 /* If the string has length of 1, call putchar. */
12476 if (str[1] == '\0')
12478 /* Given printf("c"), (where c is any one character,)
12479 convert "c"[0] to an int and pass that to the replacement
12481 newarg = build_int_cst (NULL_TREE, str[0]);
12483 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12487 /* If the string was "string\n", call puts("string"). */
12488 size_t len = strlen (str);
12489 if ((unsigned char)str[len - 1] == target_newline)
12491 /* Create a NUL-terminated string that's one char shorter
12492 than the original, stripping off the trailing '\n'. */
12493 char *newstr = XALLOCAVEC (char, len);
12494 memcpy (newstr, str, len - 1);
12495 newstr[len - 1] = 0;
12497 newarg = build_string_literal (len, newstr);
12499 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12502 /* We'd like to arrange to call fputs(string,stdout) here,
12503 but we need stdout and don't have a way to get it yet. */
12508 /* The other optimizations can be done only on the non-va_list variants. */
12509 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12512 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12513 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12515 if (!arg || !validate_arg (arg, POINTER_TYPE))
12518 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12521 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12522 else if (strcmp (fmt_str, target_percent_c) == 0)
12524 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12527 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12533 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12536 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12537 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12538 more than 3 arguments, and ARG may be null in the 2-argument case.
12540 Return NULL_TREE if no simplification was possible, otherwise return the
12541 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12542 code of the function to be simplified. */
/* Fold fprintf-family calls (see block comment above) into fputc /
   fputs when the literal format permits; only applies when the
   return value is ignored.  NOTE(review): elided extract; some lines
   missing from view.  */
12545 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12546 tree fmt, tree arg, bool ignore,
12547 enum built_in_function fcode)
12549 tree fn_fputc, fn_fputs, call = NULL_TREE;
12550 const char *fmt_str = NULL;
12552 /* If the return value is used, don't do the transformation. */
12556 /* Verify the required arguments in the original call. */
12557 if (!validate_arg (fp, POINTER_TYPE))
12559 if (!validate_arg (fmt, POINTER_TYPE))
12562 /* Check whether the format is a literal string constant. */
12563 fmt_str = c_getstr (fmt);
12564 if (fmt_str == NULL)
12567 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12569 /* If we're using an unlocked function, assume the other
12570 unlocked functions exist explicitly. */
12571 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12572 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12576 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12577 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12580 if (!init_target_chars ())
12583 /* If the format doesn't contain % args or %%, use strcpy. */
12584 if (strchr (fmt_str, target_percent) == NULL)
12586 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12590 /* If the format specifier was "", fprintf does nothing. */
12591 if (fmt_str[0] == '\0')
12593 /* If FP has side-effects, just wait until gimplification is
12595 if (TREE_SIDE_EFFECTS (fp))
12598 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12601 /* When "string" doesn't contain %, replace all cases of
12602 fprintf (fp, string) with fputs (string, fp). The fputs
12603 builtin will take care of special cases like length == 1. */
12605 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12608 /* The other optimizations can be done only on the non-va_list variants. */
12609 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12612 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12613 else if (strcmp (fmt_str, target_percent_s) == 0)
12615 if (!arg || !validate_arg (arg, POINTER_TYPE))
12618 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12621 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12622 else if (strcmp (fmt_str, target_percent_c) == 0)
12624 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12627 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12632 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12635 /* Initialize format string characters in the target charset. */
/* Initialize the cached target-charset encodings of the characters
   used for format matching ('\n', '%', 'c', 's') and the composite
   strings "%c", "%s" and "%s\n".  Fails (presumably returning false;
   callers treat a false result as "give up") when any character has
   no target encoding.  NOTE(review): elided extract.  */
12638 init_target_chars (void)
12643 target_newline = lang_hooks.to_target_charset ('\n');
12644 target_percent = lang_hooks.to_target_charset ('%');
12645 target_c = lang_hooks.to_target_charset ('c');
12646 target_s = lang_hooks.to_target_charset ('s');
      /* to_target_charset yields 0 when the char can't be encoded.  */
12647 if (target_newline == 0 || target_percent == 0 || target_c == 0
12651 target_percent_c[0] = target_percent;
12652 target_percent_c[1] = target_c;
12653 target_percent_c[2] = '\0';
12655 target_percent_s[0] = target_percent;
12656 target_percent_s[1] = target_s;
12657 target_percent_s[2] = '\0';
12659 target_percent_s_newline[0] = target_percent;
12660 target_percent_s_newline[1] = target_s;
12661 target_percent_s_newline[2] = target_newline;
12662 target_percent_s_newline[3] = '\0';
12669 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12670 and no overflow/underflow occurred. INEXACT is true if M was not
12671 exactly calculated. TYPE is the tree type for the result. This
12672 function assumes that you cleared the MPFR flags and then
12673 calculated M to see if anything subsequently set a flag prior to
12674 entering this function. Return NULL_TREE if any checks fail. */
/* Convert MPFR value M to a REAL_CST of TYPE, but only when the
   result is provably exact enough: M is a normal number, no MPFR
   overflow/underflow occurred, the value survives the mpfr -> 
   REAL_VALUE_TYPE -> target-mode round trip unchanged, and (under
   -frounding-math) the computation of M was exact.  Returns
   NULL_TREE otherwise.  */
12677 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12679 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12680 overflow/underflow occurred. If -frounding-math, proceed iff the
12681 result of calling FUNC was exact. */
12682 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12683 && (!flag_rounding_math || !inexact))
12685 REAL_VALUE_TYPE rr;
12687 real_from_mpfr (&rr, m, type, GMP_RNDN);
12688 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12689 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12690 but the mpft_t is not, then we underflowed in the
12692 if (real_isfinite (&rr)
12693 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12695 REAL_VALUE_TYPE rmode;
12697 real_convert (&rmode, TYPE_MODE (type), &rr);
12698 /* Proceed iff the specified mode can hold the value. */
12699 if (real_identical (&rmode, &rr))
12700 return build_real (type, rmode);
12706 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12707 number and no overflow/underflow occurred. INEXACT is true if M
12708 was not exactly calculated. TYPE is the tree type for the result.
12709 This function assumes that you cleared the MPFR flags and then
12710 calculated M to see if anything subsequently set a flag prior to
12711 entering this function. Return NULL_TREE if any checks fail, if
12712 FORCE_CONVERT is true, then bypass the checks. */
12715 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12717 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12718 overflow/underflow occurred. If -frounding-math, proceed iff the
12719 result of calling FUNC was exact. */
/* NOTE(review): each "||" arm below is the non-forced path; per the
   header comment, a FORCE_CONVERT test precedes it (not visible here)
   -- confirm against the full source.  */
12721 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12722 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12723 && (!flag_rounding_math || !inexact)))
12725 REAL_VALUE_TYPE re, im;
12727 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12728 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12729 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12730 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12731 but the mpfr_t is not, then we underflowed in the conversion.  */
12734 || (real_isfinite (&re) && real_isfinite (&im)
12735 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12736 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12738 REAL_VALUE_TYPE re_mode, im_mode;
/* TYPE is a complex type; TREE_TYPE (type) is its component (real)
   type, whose mode both parts are converted to.  */
12740 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12741 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12742 /* Proceed iff the specified mode can hold the value. */
12744 || (real_identical (&re_mode, &re)
12745 && real_identical (&im_mode, &im)))
12746 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12747 build_real (TREE_TYPE (type), im_mode));
12753 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12754 FUNC on it and return the resulting value as a tree with type TYPE.
12755 If MIN and/or MAX are not NULL, then the supplied ARG must be
12756 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12757 acceptable values, otherwise they are not. The mpfr precision is
12758 set to the precision of TYPE. We assume that function FUNC returns
12759 zero if the result could be calculated exactly within the requested
   precision.  */
12763 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12764 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12767 tree result = NULL_TREE;
12771 /* To proceed, MPFR must exactly represent the target floating point
12772 format, which only happens when the target base equals two. */
12773 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12774 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12776 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
/* Reject NaN/Inf and enforce the optional [MIN, MAX] domain.  */
12778 if (real_isfinite (ra)
12779 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12780 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12782 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12783 const int prec = fmt->p;
12784 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
/* Evaluate FUNC in MPFR at TYPE's precision; do_mpfr_ckconv rejects
   inexact/overflowed results and converts back to a REAL_CST.  */
12788 mpfr_init2 (m, prec);
12789 mpfr_from_real (m, ra, GMP_RNDN);
12790 mpfr_clear_flags ();
12791 inexact = func (m, m, rnd);
12792 result = do_mpfr_ckconv (m, type, inexact);
12800 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12801 FUNC on it and return the resulting value as a tree with type TYPE.
12802 The mpfr precision is set to the precision of TYPE. We assume that
12803 function FUNC returns zero if the result could be calculated
12804 exactly within the requested precision. */
12807 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12808 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12810 tree result = NULL_TREE;
12815 /* To proceed, MPFR must exactly represent the target floating point
12816 format, which only happens when the target base equals two. */
12817 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12818 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12819 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12821 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12822 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
/* Only fold for finite arguments; NaN/Inf handling is left to the
   runtime library.  */
12824 if (real_isfinite (ra1) && real_isfinite (ra2))
12826 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12827 const int prec = fmt->p;
12828 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
/* Evaluate FUNC (ARG1, ARG2) at TYPE's precision; m1 doubles as the
   result operand.  mpfr_clears releases both temporaries.  */
12832 mpfr_inits2 (prec, m1, m2, NULL);
12833 mpfr_from_real (m1, ra1, GMP_RNDN);
12834 mpfr_from_real (m2, ra2, GMP_RNDN);
12835 mpfr_clear_flags ();
12836 inexact = func (m1, m1, m2, rnd);
12837 result = do_mpfr_ckconv (m1, type, inexact);
12838 mpfr_clears (m1, m2, NULL);
12845 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12846 FUNC on it and return the resulting value as a tree with type TYPE.
12847 The mpfr precision is set to the precision of TYPE. We assume that
12848 function FUNC returns zero if the result could be calculated
12849 exactly within the requested precision. */
12852 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12853 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12855 tree result = NULL_TREE;
12861 /* To proceed, MPFR must exactly represent the target floating point
12862 format, which only happens when the target base equals two. */
12863 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12864 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12865 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12866 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12868 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12869 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12870 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
/* Only fold for finite arguments (e.g. for fma-style builtins).  */
12872 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12874 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12875 const int prec = fmt->p;
12876 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
/* Evaluate FUNC (ARG1, ARG2, ARG3) at TYPE's precision; m1 doubles
   as the result operand.  */
12880 mpfr_inits2 (prec, m1, m2, m3, NULL);
12881 mpfr_from_real (m1, ra1, GMP_RNDN);
12882 mpfr_from_real (m2, ra2, GMP_RNDN);
12883 mpfr_from_real (m3, ra3, GMP_RNDN);
12884 mpfr_clear_flags ();
12885 inexact = func (m1, m1, m2, m3, rnd);
12886 result = do_mpfr_ckconv (m1, type, inexact);
12887 mpfr_clears (m1, m2, m3, NULL);
12894 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12895 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12896 If ARG_SINP and ARG_COSP are NULL then the result is returned
12897 as a complex value.
12898 The type is taken from the type of ARG and is used for setting the
12899 precision of the calculation and results. */
12902 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12904 tree const type = TREE_TYPE (arg);
12905 tree result = NULL_TREE;
12909 /* To proceed, MPFR must exactly represent the target floating point
12910 format, which only happens when the target base equals two. */
12911 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12912 && TREE_CODE (arg) == REAL_CST
12913 && !TREE_OVERFLOW (arg))
12915 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12917 if (real_isfinite (ra))
12919 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12920 const int prec = fmt->p;
12921 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12922 tree result_s, result_c;
/* mpfr_sin_cos computes both values in one call; both conversions
   must succeed for the fold to proceed.  */
12926 mpfr_inits2 (prec, m, ms, mc, NULL);
12927 mpfr_from_real (m, ra, GMP_RNDN);
12928 mpfr_clear_flags ();
12929 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12930 result_s = do_mpfr_ckconv (ms, type, inexact);
12931 result_c = do_mpfr_ckconv (mc, type, inexact);
12932 mpfr_clears (m, ms, mc, NULL);
12933 if (result_s && result_c)
12935 /* If we are to return in a complex value do so. */
/* Real part = cos, imaginary part = sin (the cexpi convention).  */
12936 if (!arg_sinp && !arg_cosp)
12937 return build_complex (build_complex_type (type),
12938 result_c, result_s);
12940 /* Dereference the sin/cos pointer arguments. */
12941 arg_sinp = build_fold_indirect_ref (arg_sinp);
12942 arg_cosp = build_fold_indirect_ref (arg_cosp);
12943 /* Proceed if valid pointer types were passed in. */
12944 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12945 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12947 /* Set the values. */
12948 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12950 TREE_SIDE_EFFECTS (result_s) = 1;
12951 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12953 TREE_SIDE_EFFECTS (result_c) = 1;
12954 /* Combine the assignments into a compound expr. */
12955 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12956 result_s, result_c));
12964 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12965 two-argument mpfr order N Bessel function FUNC on them and return
12966 the resulting value as a tree with type TYPE. The mpfr precision
12967 is set to the precision of TYPE. We assume that function FUNC
12968 returns zero if the result could be calculated exactly within the
12969 requested precision. */
12971 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12972 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12973 const REAL_VALUE_TYPE *min, bool inclusive)
12975 tree result = NULL_TREE;
12980 /* To proceed, MPFR must exactly represent the target floating point
12981 format, which only happens when the target base equals two. */
12982 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12983 && host_integerp (arg1, 0)
12984 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12986 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12987 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
/* Require a finite argument within the optional MIN lower bound
   (e.g. x >= 0 for yn).  */
12990 && real_isfinite (ra)
12991 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12993 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12994 const int prec = fmt->p;
12995 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
/* FUNC takes the Bessel order N as a host long.  */
12999 mpfr_init2 (m, prec);
13000 mpfr_from_real (m, ra, GMP_RNDN);
13001 mpfr_clear_flags ();
13002 inexact = func (m, n, m, rnd);
13003 result = do_mpfr_ckconv (m, type, inexact);
13011 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13012 the pointer *(ARG_QUO) and return the result. The type is taken
13013 from the type of ARG0 and is used for setting the precision of the
13014 calculation and results. */
13017 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13019 tree const type = TREE_TYPE (arg0);
13020 tree result = NULL_TREE;
13025 /* To proceed, MPFR must exactly represent the target floating point
13026 format, which only happens when the target base equals two. */
13027 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13028 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13029 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13031 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13032 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13034 if (real_isfinite (ra0) && real_isfinite (ra1))
13036 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13037 const int prec = fmt->p;
13038 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
/* mpfr_remquo yields the remainder in m0 and the low quotient bits
   in integer_quo.  */
13043 mpfr_inits2 (prec, m0, m1, NULL);
13044 mpfr_from_real (m0, ra0, GMP_RNDN);
13045 mpfr_from_real (m1, ra1, GMP_RNDN);
13046 mpfr_clear_flags ();
13047 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13048 /* Remquo is independent of the rounding mode, so pass
13049 inexact=0 to do_mpfr_ckconv(). */
13050 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13051 mpfr_clears (m0, m1, NULL);
13054 /* MPFR calculates quo in the host's long so it may
13055 return more bits in quo than the target int can hold
13056 if sizeof(host long) > sizeof(target int). This can
13057 happen even for native compilers in LP64 mode. In
13058 these cases, modulo the quo value with the largest
13059 number that the target int can hold while leaving one
13060 bit for the sign. */
13061 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13062 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13064 /* Dereference the quo pointer argument. */
13065 arg_quo = build_fold_indirect_ref (arg_quo);
13066 /* Proceed iff a valid pointer type was passed in. */
13067 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13069 /* Set the value. */
13070 tree result_quo = fold_build2 (MODIFY_EXPR,
13071 TREE_TYPE (arg_quo), arg_quo,
13072 build_int_cst (NULL, integer_quo))
/* (continued from above) */;
13073 TREE_SIDE_EFFECTS (result_quo) = 1;
13074 /* Combine the quo assignment with the rem. */
13075 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13076 result_quo, result_rem));
13084 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13085 resulting value as a tree with type TYPE. The mpfr precision is
13086 set to the precision of TYPE. We assume that this mpfr function
13087 returns zero if the result could be calculated exactly within the
13088 requested precision. In addition, the integer pointer represented
13089 by ARG_SG will be dereferenced and set to the appropriate signgam
   value.  */
13093 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13095 tree result = NULL_TREE;
13099 /* To proceed, MPFR must exactly represent the target floating point
13100 format, which only happens when the target base equals two. Also
13101 verify ARG is a constant and that ARG_SG is an int pointer. */
13102 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13103 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13104 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13105 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13107 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13109 /* In addition to NaN and Inf, the argument cannot be zero or a
13110 negative integer. */
/* lgamma poles are at zero and the negative integers, so those are
   excluded from constant folding.  */
13111 if (real_isfinite (ra)
13112 && ra->cl != rvc_zero
13113 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13115 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13116 const int prec = fmt->p;
13117 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
/* mpfr_lgamma also produces the sign of gamma(x) in SG.  */
13122 mpfr_init2 (m, prec);
13123 mpfr_from_real (m, ra, GMP_RNDN);
13124 mpfr_clear_flags ();
13125 inexact = mpfr_lgamma (m, &sg, m, rnd);
13126 result_lg = do_mpfr_ckconv (m, type, inexact);
13132 /* Dereference the arg_sg pointer argument. */
13133 arg_sg = build_fold_indirect_ref (arg_sg);
13134 /* Assign the signgam value into *arg_sg. */
13135 result_sg = fold_build2 (MODIFY_EXPR,
13136 TREE_TYPE (arg_sg), arg_sg,
13137 build_int_cst (NULL, sg))
/* (continued from above) */;
13138 TREE_SIDE_EFFECTS (result_sg) = 1;
13139 /* Combine the signgam assignment with the lgamma result. */
13140 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13141 result_sg, result_lg));
13149 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13150 function FUNC on it and return the resulting value as a tree with
13151 type TYPE. The mpfr precision is set to the precision of TYPE. We
13152 assume that function FUNC returns zero if the result could be
13153 calculated exactly within the requested precision. */
13156 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13158 tree result = NULL_TREE;
13162 /* To proceed, MPFR must exactly represent the target floating point
13163 format, which only happens when the target base equals two. */
13164 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13165 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13166 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13168 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13169 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13171 if (real_isfinite (re) && real_isfinite (im))
13173 const struct real_format *const fmt =
13174 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13175 const int prec = fmt->p;
13176 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
/* MPC takes a combined rounding mode for the real and imaginary
   parts; use the same direction for both.  */
13177 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13181 mpc_init2 (m, prec);
13182 mpfr_from_real (mpc_realref(m), re, rnd);
13183 mpfr_from_real (mpc_imagref(m), im, rnd);
13184 mpfr_clear_flags ();
13185 inexact = func (m, m, crnd);
13186 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13194 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13195 mpc function FUNC on it and return the resulting value as a tree
13196 with type TYPE. The mpfr precision is set to the precision of
13197 TYPE. We assume that function FUNC returns zero if the result
13198 could be calculated exactly within the requested precision. If
13199 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13200 in the arguments and/or results. */
13203 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13204 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13206 tree result = NULL_TREE;
13211 /* To proceed, MPFR must exactly represent the target floating point
13212 format, which only happens when the target base equals two. */
13213 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13214 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13215 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13216 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13217 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13219 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13220 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13221 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13222 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
/* NOTE(review): per the header comment, a DO_NONFINITE test precedes
   this "||" arm (not visible here) -- confirm against full source.  */
13225 || (real_isfinite (re0) && real_isfinite (im0)
13226 && real_isfinite (re1) && real_isfinite (im1)))
13228 const struct real_format *const fmt =
13229 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13230 const int prec = fmt->p;
13231 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13232 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
/* Evaluate FUNC (ARG0, ARG1); m0 doubles as the result operand.
   DO_NONFINITE is forwarded as force_convert to do_mpc_ckconv.  */
13236 mpc_init2 (m0, prec);
13237 mpc_init2 (m1, prec);
13238 mpfr_from_real (mpc_realref(m0), re0, rnd);
13239 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13240 mpfr_from_real (mpc_realref(m1), re1, rnd);
13241 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13242 mpfr_clear_flags ();
13243 inexact = func (m0, m0, m1, crnd);
13244 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
/*
13254 The functions below provide an alternate interface for folding
13255 builtin function calls presented as GIMPLE_CALL statements rather
13256 than as CALL_EXPRs. The folded result is still expressed as a
13257 tree. There is too much code duplication in the handling of
13258 varargs functions, and a more intrusive re-factoring would permit
13259 better sharing of code between the tree and statement-based
13260 versions of these functions. */
13262 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13263 along with N new arguments specified as the "..." parameters. SKIP
13264 is the number of arguments in STMT to be omitted. This function is used
13265 to do varargs-to-varargs transformations. */
13268 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13270 int oldnargs = gimple_call_num_args (stmt);
13271 int nargs = oldnargs - skip + n;
13272 tree fntype = TREE_TYPE (fndecl);
13273 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13277 location_t loc = gimple_location (stmt);
/* Stack-allocate the new argument vector: first the N "..." args,
   then STMT's arguments after the first SKIP.  */
13279 buffer = XALLOCAVEC (tree, nargs);
13281 for (i = 0; i < n; i++)
13282 buffer[i] = va_arg (ap, tree);
13284 for (j = skip; j < oldnargs; j++, i++)
13285 buffer[i] = gimple_call_arg (stmt, j);
/* Fold the freshly-built CALL_EXPR so further simplification can
   happen immediately.  */
13287 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13290 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13291 a normal call should be emitted rather than expanding the function
13292 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13295 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13297 tree dest, size, len, fn, fmt, flag;
13298 const char *fmt_str;
13299 int nargs = gimple_call_num_args (stmt);
13301 /* Verify the required arguments in the original call. */
/* Expected argument order: dest, flag, size, fmt, [args...].  */
13304 dest = gimple_call_arg (stmt, 0);
13305 if (!validate_arg (dest, POINTER_TYPE))
13307 flag = gimple_call_arg (stmt, 1);
13308 if (!validate_arg (flag, INTEGER_TYPE))
13310 size = gimple_call_arg (stmt, 2);
13311 if (!validate_arg (size, INTEGER_TYPE))
13313 fmt = gimple_call_arg (stmt, 3);
13314 if (!validate_arg (fmt, POINTER_TYPE))
/* SIZE must be a compile-time constant to reason about the bound.  */
13317 if (! host_integerp (size, 1))
13322 if (!init_target_chars ())
13325 /* Check whether the format is a literal string constant. */
13326 fmt_str = c_getstr (fmt);
13327 if (fmt_str != NULL)
13329 /* If the format doesn't contain % args or %%, we know the size. */
13330 if (strchr (fmt_str, target_percent) == 0)
13332 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13333 len = build_int_cstu (size_type_node, strlen (fmt_str));
13335 /* If the format is "%s" and first ... argument is a string literal,
13336 we know the size too. */
13337 else if (fcode == BUILT_IN_SPRINTF_CHK
13338 && strcmp (fmt_str, target_percent_s) == 0)
13344 arg = gimple_call_arg (stmt, 4);
13345 if (validate_arg (arg, POINTER_TYPE))
13347 len = c_strlen (arg, 1);
13348 if (! len || ! host_integerp (len, 1))
/* size == -1 (all-ones) means "unchecked"; otherwise the known
   output length must fit strictly within SIZE.  */
13355 if (! integer_all_onesp (size))
13357 if (! len || ! tree_int_cst_lt (len, size))
13361 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13362 or if format doesn't contain % chars or is "%s". */
13363 if (! integer_zerop (flag))
13365 if (fmt_str == NULL)
13367 if (strchr (fmt_str, target_percent) != NULL
13368 && strcmp (fmt_str, target_percent_s))
13372 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13373 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13374 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
/* Drop the flag and size arguments: (dest, flag, size, fmt, ...) becomes
   (dest, fmt, ...).  */
13378 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13381 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13382 a normal call should be emitted rather than expanding the function
13383 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13384 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13385 passed as second argument. */
13388 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13389 enum built_in_function fcode)
13391 tree dest, size, len, fn, fmt, flag;
13392 const char *fmt_str;
13394 /* Verify the required arguments in the original call. */
/* Expected argument order: dest, len, flag, size, fmt, [args...].  */
13395 if (gimple_call_num_args (stmt) < 5)
13397 dest = gimple_call_arg (stmt, 0);
13398 if (!validate_arg (dest, POINTER_TYPE))
13400 len = gimple_call_arg (stmt, 1);
13401 if (!validate_arg (len, INTEGER_TYPE))
13403 flag = gimple_call_arg (stmt, 2);
13404 if (!validate_arg (flag, INTEGER_TYPE))
13406 size = gimple_call_arg (stmt, 3);
13407 if (!validate_arg (size, INTEGER_TYPE))
13409 fmt = gimple_call_arg (stmt, 4);
13410 if (!validate_arg (fmt, POINTER_TYPE))
13413 if (! host_integerp (size, 1))
/* size == -1 (all-ones) means "unchecked"; otherwise LEN (or MAXLEN)
   must be a constant no larger than SIZE.  */
13416 if (! integer_all_onesp (size))
13418 if (! host_integerp (len, 1))
13420 /* If LEN is not constant, try MAXLEN too.
13421 For MAXLEN only allow optimizing into non-_ocs function
13422 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13423 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13429 if (tree_int_cst_lt (size, maxlen))
13433 if (!init_target_chars ())
13436 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13437 or if format doesn't contain % chars or is "%s". */
13438 if (! integer_zerop (flag))
13440 fmt_str = c_getstr (fmt);
13441 if (fmt_str == NULL)
13443 if (strchr (fmt_str, target_percent) != NULL
13444 && strcmp (fmt_str, target_percent_s))
13448 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13450 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13451 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
/* Drop the flag and size arguments: (dest, len, flag, size, fmt, ...)
   becomes (dest, len, fmt, ...).  */
13455 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13458 /* Builtins with folding operations that operate on "..." arguments
13459 need special handling; we need to store the arguments in a convenient
13460 data structure before attempting any folding. Fortunately there are
13461 only a few builtins that fall into this category. FNDECL is the
13462 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13463 result of the function call is ignored. */
13466 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13467 bool ignore ATTRIBUTE_UNUSED)
13469 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13470 tree ret = NULL_TREE;
/* Dispatch on the builtin code; only the *printf_chk family needs
   varargs-aware folding here.  */
13474 case BUILT_IN_SPRINTF_CHK:
13475 case BUILT_IN_VSPRINTF_CHK:
13476 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13479 case BUILT_IN_SNPRINTF_CHK:
13480 case BUILT_IN_VSNPRINTF_CHK:
13481 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
/* Wrap the folded result in a NOP_EXPR and suppress warnings so the
   replacement does not trigger "statement with no effect" etc.  */
13488 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13489 TREE_NO_WARNING (ret) = 1;
13495 /* A wrapper function for builtin folding that prevents warnings for
13496 "statement without effect" and the like, caused by removing the
13497 call node earlier than the warning is generated. */
13500 fold_call_stmt (gimple stmt, bool ignore)
13502 tree ret = NULL_TREE;
13503 tree fndecl = gimple_call_fndecl (stmt);
13504 location_t loc = gimple_location (stmt);
/* Only fold direct calls to builtins; __builtin_va_arg_pack calls
   must be left alone.  */
13506 && TREE_CODE (fndecl) == FUNCTION_DECL
13507 && DECL_BUILT_IN (fndecl)
13508 && !gimple_call_va_arg_pack_p (stmt))
13510 int nargs = gimple_call_num_args (stmt);
13512 if (avoid_folding_inline_builtin (fndecl))
13514 /* FIXME: Don't use a list in this interface. */
/* Machine-specific builtins go through the target hook, which still
   takes a TREE_LIST of arguments (built in reverse).  */
13515 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13517 tree arglist = NULL_TREE;
13519 for (i = nargs - 1; i >= 0; i--)
13520 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13521 return targetm.fold_builtin (fndecl, arglist, ignore);
/* Fixed-arity builtins are handled by fold_builtin_n; anything with
   more arguments falls back to the varargs folder.  */
13525 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13527 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13529 for (i = 0; i < nargs; i++)
13530 args[i] = gimple_call_arg (stmt, i);
13531 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13534 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13537 /* Propagate location information from original call to
13538 expansion of builtin. Otherwise things like
13539 maybe_emit_chk_warning, that operate on the expansion
13540 of a builtin, will use the wrong location information. */
13541 if (gimple_has_location (stmt))
13543 tree realret = ret;
/* Look through the warning-suppressing NOP_EXPR wrapper added by
   gimple_fold_builtin_varargs when setting the location.  */
13544 if (TREE_CODE (ret) == NOP_EXPR)
13545 realret = TREE_OPERAND (ret, 0);
13546 if (CAN_HAVE_LOCATION_P (realret)
13547 && !EXPR_HAS_LOCATION (realret))
13548 SET_EXPR_LOCATION (realret, loc);
13558 /* Look up the function in built_in_decls that corresponds to DECL
13559 and set ASMSPEC as its user assembler name. DECL must be a
13560 function decl that declares a builtin. */
13563 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13566 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13567 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13570 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13571 set_user_assembler_name (builtin, asmspec);
13572 switch (DECL_FUNCTION_CODE (decl))
13574 case BUILT_IN_MEMCPY:
13575 init_block_move_fn (asmspec);
13576 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13578 case BUILT_IN_MEMSET:
13579 init_block_clear_fn (asmspec);
13580 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13582 case BUILT_IN_MEMMOVE:
13583 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13585 case BUILT_IN_MEMCMP:
13586 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13588 case BUILT_IN_ABORT:
13589 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);