1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
68 const char * built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_args_info (tree);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
228 /* Return true if NAME starts with __builtin_ or __sync_. */
/* NOTE(review): this listing is truncated (original line numbers jump);
   the return type, braces, and return statements of this predicate are
   not visible here.  Only the two prefix tests survive.  */
231 is_builtin_name (const char *name)
/* Reserved "__builtin_" prefix (10 characters).  */
233 if (strncmp (name, "__builtin_", 10) == 0)
/* Reserved "__sync_" prefix (7 characters) used by the atomic builtins.  */
235 if (strncmp (name, "__sync_", 7) == 0)
241 /* Return true if DECL is a function symbol representing a built-in. */
244 is_builtin_fn (tree decl)
/* A built-in is exactly a FUNCTION_DECL with the DECL_BUILT_IN flag set.  */
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
255 called_as_built_in (tree node)
/* NOTE(review): the comment below is cut off mid-sentence in this listing
   (original line 259 is missing).  */
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
260 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
/* Delegate the prefix check to is_builtin_name above.  */
261 return is_builtin_name (name);
264 /* Return the alignment in bits of EXP, an object.
265 Don't return more than MAX_ALIGN no matter what, ALIGN is the initial
266 guessed alignment e.g. from type alignment. */
/* NOTE(review): this listing is truncated -- the return type, several
   declarations (e.g. `inner', `offset', `next_offset'), braces, and some
   statements are missing; comments below describe only the visible logic.  */
269 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
/* Peel off component references (COMPONENT_REF, ARRAY_REF, ...) to reach
   the underlying object, accumulating alignment constraints in `inner'.  */
274 if (handled_component_p (exp))
276 HOST_WIDE_INT bitsize, bitpos;
278 enum machine_mode mode;
279 int unsignedp, volatilep;
281 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
282 &mode, &unsignedp, &volatilep, true);
/* bitpos & -bitpos isolates the lowest set bit of the bit offset; the
   object can be no more aligned than that power of two.  */
284 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
/* Walk a chain of PLUS_EXPR offsets, handling one term at a time.  */
289 if (TREE_CODE (offset) == PLUS_EXPR)
291 next_offset = TREE_OPERAND (offset, 0);
292 offset = TREE_OPERAND (offset, 1);
/* Constant byte offset: its low set bit (in bits) bounds the alignment.  */
296 if (host_integerp (offset, 1))
298 /* Any overflow in calculating offset_bits won't change
301 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
304 inner = MIN (inner, (offset_bits & -offset_bits));
/* Variable offset scaled by a known constant factor: the factor's low
   set bit still bounds the alignment.  */
306 else if (TREE_CODE (offset) == MULT_EXPR
307 && host_integerp (TREE_OPERAND (offset, 1), 1))
309 /* Any overflow in calculating offset_factor won't change
311 unsigned offset_factor
312 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
316 inner = MIN (inner, (offset_factor & -offset_factor));
/* Completely unknown offset: assume only byte alignment.  */
320 inner = MIN (inner, BITS_PER_UNIT);
323 offset = next_offset;
/* For a CONST_DECL look at its initializer instead.  */
326 if (TREE_CODE (exp) == CONST_DECL)
327 exp = DECL_INITIAL (exp);
/* Combine the declared/constant/type alignment of what we reached with
   the `inner' constraints gathered above, capped at MAX_ALIGN.  */
329 align = MIN (inner, DECL_ALIGN (exp));
330 #ifdef CONSTANT_ALIGNMENT
331 else if (CONSTANT_CLASS_P (exp))
332 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
334 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
335 || TREE_CODE (exp) == INDIRECT_REF)
336 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
338 align = MIN (align, inner);
339 return MIN (align, max_align);
342 /* Returns true iff we can trust that alignment information has been
343 calculated properly. */
346 can_trust_pointer_alignment (void)
348 /* We rely on TER to compute accurate alignment information. */
/* Trustworthy only when optimizing with tree temporary-expression
   replacement (-ftree-ter) enabled.  */
349 return (optimize && flag_tree_ter);
352 /* Return the alignment in bits of EXP, a pointer valued expression.
353 But don't return more than MAX_ALIGN no matter what.
354 The alignment returned is, by default, the alignment of the thing that
355 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
357 Otherwise, look at the expression to see if we can do better, i.e., if the
358 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): this listing is truncated -- the return type, braces,
   several `case' labels of the switch (e.g. for conversions and ADDR_EXPR),
   return statements, and loop structure are not visible here.  */
361 get_pointer_alignment (tree exp, unsigned int max_align)
363 unsigned int align, inner;
/* Without reliable alignment data, claim nothing better than 0/default.  */
365 if (!can_trust_pointer_alignment ())
368 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
/* Start from the alignment of the pointed-to type.  */
371 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
372 align = MIN (align, max_align);
376 switch (TREE_CODE (exp))
/* (Truncated case: likely a pointer conversion.)  Strip the conversion
   and fold in the new pointed-to type's alignment.  */
379 exp = TREE_OPERAND (exp, 0);
380 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
383 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
384 align = MIN (inner, max_align);
387 case POINTER_PLUS_EXPR:
388 /* If sum of pointer + int, restrict our maximum alignment to that
389 imposed by the integer. If not, we can't do any better than
391 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
/* Halve max_align until the constant addend is a multiple of it.  */
394 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
395 & (max_align / BITS_PER_UNIT - 1))
399 exp = TREE_OPERAND (exp, 0);
/* (Truncated case: presumably ADDR_EXPR.)  */
403 /* See what we are pointing at and look at its alignment. */
404 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
412 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
413 way, because it could contain a zero byte in the middle.
414 TREE_STRING_LENGTH is the size of the character array, not the string.
416 ONLY_VALUE should be nonzero if the result is not going to be emitted
417 into the instruction stream and zero if it is going to be expanded.
418 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
419 is returned, otherwise NULL, since
420 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
421 evaluate the side-effects.
423 The value returned is of type `ssizetype'.
425 Unfortunately, string_constant can't access the values of const char
426 arrays with initializers, so neither can we do so here. */
/* NOTE(review): this listing is truncated -- the return type, local
   declarations (offset_node, ptr, max, i, len1, len2), braces, and several
   return statements are missing; comments describe visible logic only.  */
429 c_strlen (tree src, int only_value)
432 HOST_WIDE_INT offset;
/* For a COND_EXPR whose arms have equal known lengths, that common
   length is the answer (side-effect rules per ONLY_VALUE above).  */
437 if (TREE_CODE (src) == COND_EXPR
438 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
442 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
443 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
444 if (tree_int_cst_equal (len1, len2))
/* For a COMPOUND_EXPR the value is the second operand.  */
448 if (TREE_CODE (src) == COMPOUND_EXPR
449 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
450 return c_strlen (TREE_OPERAND (src, 1), only_value);
/* Reduce SRC to an underlying STRING_CST plus byte offset, if possible.  */
452 src = string_constant (src, &offset_node);
456 max = TREE_STRING_LENGTH (src) - 1;
457 ptr = TREE_STRING_POINTER (src);
459 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
461 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
462 compute the offset to the following null if we don't know where to
463 start searching for it. */
/* (Truncated loop body: scans for an embedded NUL.)  */
466 for (i = 0; i < max; i++)
470 /* We don't know the starting offset, but we do know that the string
471 has no internal zero bytes. We can assume that the offset falls
472 within the bounds of the string; otherwise, the programmer deserves
473 what he gets. Subtract the offset from the length of the string,
474 and return that. This would perhaps not be valid if we were dealing
475 with named arrays in addition to literal string constants. */
477 return size_diffop_loc (input_location, size_int (max), offset_node);
480 /* We have a known offset into the string. Start searching there for
481 a null character if we can represent it as a single HOST_WIDE_INT. */
482 if (offset_node == 0)
484 else if (! host_integerp (offset_node, 0))
487 offset = tree_low_cst (offset_node, 0);
489 /* If the offset is known to be out of bounds, warn, and call strlen at
491 if (offset < 0 || offset > max)
493 /* Suppress multiple warnings for propagated constant strings. */
494 if (! TREE_NO_WARNING (src))
496 warning (0, "offset outside bounds of constant string");
497 TREE_NO_WARNING (src) = 1;
502 /* Use strlen to search for the first zero byte. Since any strings
503 constructed with build_string will have nulls appended, we win even
504 if we get handed something like (char[4])"abcd".
506 Since OFFSET is our starting index into the string, no further
507 calculation is needed. */
508 return ssize_int (strlen (ptr + offset));
511 /* Return a char pointer for a C string if it is a string constant
512 or sum of string constant and integer constant. */
/* NOTE(review): the function signature itself (c_getstr per the forward
   declaration above) is missing from this truncated listing, along with
   braces and the NULL-returning failure paths.  */
519 src = string_constant (src, &offset_node);
/* No offset: point at the start of the literal.  */
523 if (offset_node == 0)
524 return TREE_STRING_POINTER (src);
/* Reject non-constant or out-of-bounds offsets.  */
525 else if (!host_integerp (offset_node, 1)
526 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
529 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
532 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
533 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
/* NOTE(review): truncated listing -- declarations of c[], i, j, ch and the
   zero-initialization of c are not visible here.  */
536 c_readstr (const char *str, enum machine_mode mode)
542 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
/* Pack the bytes of STR into the two-HOST_WIDE_INT buffer c[] in the
   target's byte/word order.  */
547 for (i = 0; i < GET_MODE_SIZE (mode); i++)
/* Big-endian words: mirror the byte index within the mode.  */
550 if (WORDS_BIG_ENDIAN)
551 j = GET_MODE_SIZE (mode) - i - 1;
/* Mixed endianness: additionally mirror bytes within each word.  */
552 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
553 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
554 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
556 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
559 ch = (unsigned char) str[i];
560 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
562 return immed_double_const (c[0], c[1], mode);
565 /* Cast a target constant CST to target CHAR and if that value fits into
566 host char type, return zero and put that value into variable pointed to by
/* NOTE(review): truncated listing -- the trailing words of the comment
   above ("P." presumably), braces, the hostval assignment, the fit check,
   and the return statements are not visible here.  */
570 target_char_cast (tree cst, char *p)
572 unsigned HOST_WIDE_INT val, hostval;
/* Must be a host-representable unsigned integer constant.  */
574 if (!host_integerp (cst, 1)
575 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
578 val = tree_low_cst (cst, 1);
/* Truncate to the target's char width.  */
579 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
580 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
/* Truncate again to the host's char width for comparison.  */
583 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
584 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
593 /* Similar to save_expr, but assumes that arbitrary code is not executed
594 in between the multiple evaluations. In particular, we assume that a
595 non-addressable local variable will not be modified. */
/* NOTE(review): truncated listing -- the return type, braces, and the
   early `return exp;' for the safe-to-reevaluate case are missing.  */
598 builtin_save_expr (tree exp)
/* Non-addressable parameters and non-static locals cannot change between
   evaluations here, so they need no SAVE_EXPR wrapper.  */
600 if (TREE_ADDRESSABLE (exp) == 0
601 && (TREE_CODE (exp) == PARM_DECL
602 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
605 return save_expr (exp);
608 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
609 times to get the address of either a higher stack frame, or a return
610 address located within it (depending on FNDECL_CODE). */
/* NOTE(review): truncated listing -- the return type, braces, #else/#endif
   preprocessor arms, some declarations, and return statements are not
   visible; comments describe only the visible logic.  */
613 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
617 #ifdef INITIAL_FRAME_ADDRESS_RTX
618 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
622 /* For a zero count with __builtin_return_address, we don't care what
623 frame address we return, because target-specific definitions will
624 override us. Therefore frame pointer elimination is OK, and using
625 the soft frame pointer is OK.
627 For a nonzero count, or a zero count with __builtin_frame_address,
628 we require a stable offset from the current frame pointer to the
629 previous one, so we must use the hard frame pointer, and
630 we must disable frame pointer elimination. */
631 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
632 tem = frame_pointer_rtx;
635 tem = hard_frame_pointer_rtx;
637 /* Tell reload not to eliminate the frame pointer. */
638 crtl->accesses_prior_frames = 1;
642 /* Some machines need special handling before we can access
643 arbitrary frames. For example, on the SPARC, we must first flush
644 all register windows to the stack. */
645 #ifdef SETUP_FRAME_ADDRESSES
647 SETUP_FRAME_ADDRESSES ();
650 /* On the SPARC, the return address is not in the frame, it is in a
651 register. There is no way to access it off of the current frame
652 pointer, but it can be accessed off the previous frame pointer by
653 reading the value from the register window save area. */
654 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
655 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
659 /* Scan back COUNT frames to the specified frame. */
660 for (i = 0; i < count; i++)
662 /* Assume the dynamic chain pointer is in the word that the
663 frame address points to, unless otherwise specified. */
664 #ifdef DYNAMIC_CHAIN_ADDRESS
665 tem = DYNAMIC_CHAIN_ADDRESS (tem);
/* Dereference one link of the dynamic chain.  */
667 tem = memory_address (Pmode, tem);
668 tem = gen_frame_mem (Pmode, tem);
669 tem = copy_to_reg (tem);
672 /* For __builtin_frame_address, return what we've got. But, on
673 the SPARC for example, we may have to add a bias. */
674 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
675 #ifdef FRAME_ADDR_RTX
676 return FRAME_ADDR_RTX (tem);
681 /* For __builtin_return_address, get the return address from that frame. */
682 #ifdef RETURN_ADDR_RTX
683 tem = RETURN_ADDR_RTX (count, tem);
/* Default: the return address sits one word above the frame address.  */
685 tem = memory_address (Pmode,
686 plus_constant (tem, GET_MODE_SIZE (Pmode)));
687 tem = gen_frame_mem (Pmode, tem);
692 /* Alias set used for setjmp buffer. */
693 static alias_set_type setjmp_alias_set = -1;
695 /* Construct the leading half of a __builtin_setjmp call. Control will
696 return to RECEIVER_LABEL. This is also called directly by the SJLJ
697 exception handling code. */
/* Buffer layout: word 0 = frame pointer, word 1 = receiver label address,
   words 2+ = machine-dependent stack save area (see comment below).
   NOTE(review): truncated listing -- return type, braces, and the `mem'/
   `stack_save' declarations are not visible here.  */
700 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
702 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* Lazily create the alias set shared by all setjmp-buffer accesses.  */
706 if (setjmp_alias_set == -1)
707 setjmp_alias_set = new_alias_set ();
709 buf_addr = convert_memory_address (Pmode, buf_addr);
711 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
713 /* We store the frame pointer and the address of receiver_label in
714 the buffer and use the rest of it for the stack save area, which
715 is machine-dependent. */
717 mem = gen_rtx_MEM (Pmode, buf_addr);
718 set_mem_alias_set (mem, setjmp_alias_set);
719 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
/* NOTE(review): the line below ends in a comma operator rather than a
   semicolon (harmless but likely a typo in the original source).  */
721 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
722 set_mem_alias_set (mem, setjmp_alias_set);
724 emit_move_insn (validize_mem (mem),
725 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
727 stack_save = gen_rtx_MEM (sa_mode,
728 plus_constant (buf_addr,
729 2 * GET_MODE_SIZE (Pmode)));
730 set_mem_alias_set (stack_save, setjmp_alias_set);
731 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
733 /* If there is further processing to do, do it. */
734 #ifdef HAVE_builtin_setjmp_setup
735 if (HAVE_builtin_setjmp_setup)
736 emit_insn (gen_builtin_setjmp_setup (buf_addr));
739 /* Tell optimize_save_area_alloca that extra work is going to
740 need to go on during alloca. */
741 cfun->calls_setjmp = 1;
743 /* We have a nonlocal label. */
744 cfun->has_nonlocal_label = 1;
747 /* Construct the trailing part of a __builtin_setjmp call. This is
748 also called directly by the SJLJ exception handling code. */
/* NOTE(review): truncated listing -- return type, braces, #else/#endif
   arms, and the `chain'/`i' declarations are not all visible here.  */
751 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
755 /* Clobber the FP when we get here, so we have to make sure it's
756 marked as used by this function. */
757 emit_use (hard_frame_pointer_rtx)
818 /* __builtin_longjmp is passed a pointer to an array of five words (not
819 all will be used on all machines). It operates similarly to the C
820 library function of the same name, but is more efficient. Much of
821 the code below is copied from the handling of non-local gotos. */
/* NOTE(review): truncated listing -- return type, braces, the #else for
   HAVE_builtin_longjmp, and parts of the insn-scanning loop at the end are
   not visible here.  */
824 expand_builtin_longjmp (rtx buf_addr, rtx value)
826 rtx fp, lab, stack, insn, last;
827 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
829 /* DRAP is needed for stack realign if longjmp is expanded to current
831 if (SUPPORTS_STACK_ALIGNMENT)
832 crtl->need_drap = true;
834 if (setjmp_alias_set == -1)
835 setjmp_alias_set = new_alias_set ();
837 buf_addr = convert_memory_address (Pmode, buf_addr);
839 buf_addr = force_reg (Pmode, buf_addr);
841 /* We require that the user must pass a second argument of 1, because
842 that is what builtin_setjmp will return. */
843 gcc_assert (value == const1_rtx);
845 last = get_last_insn ();
/* Prefer a target-provided longjmp pattern when available.  */
846 #ifdef HAVE_builtin_longjmp
847 if (HAVE_builtin_longjmp)
848 emit_insn (gen_builtin_longjmp (buf_addr));
/* Generic path: reload FP, label, and SP from the setjmp buffer
   (layout matches expand_builtin_setjmp_setup: FP at word 0, label at
   word 1, stack save area at word 2).  */
852 fp = gen_rtx_MEM (Pmode, buf_addr);
853 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
854 GET_MODE_SIZE (Pmode)));
856 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
857 2 * GET_MODE_SIZE (Pmode)));
858 set_mem_alias_set (fp, setjmp_alias_set);
859 set_mem_alias_set (lab, setjmp_alias_set);
860 set_mem_alias_set (stack, setjmp_alias_set);
862 /* Pick up FP, label, and SP from the block and jump. This code is
863 from expand_goto in stmt.c; see there for detailed comments. */
864 #ifdef HAVE_nonlocal_goto
865 if (HAVE_nonlocal_goto)
866 /* We have to pass a value to the nonlocal_goto pattern that will
867 get copied into the static_chain pointer, but it does not matter
868 what that value is, because builtin_setjmp does not use it. */
869 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
/* Manual fallback: clobber memory/FP, restore FP and SP, jump to label.  */
873 lab = copy_to_reg (lab);
875 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
876 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
878 emit_move_insn (hard_frame_pointer_rtx, fp);
879 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
881 emit_use (hard_frame_pointer_rtx);
882 emit_use (stack_pointer_rtx);
883 emit_indirect_jump (lab);
887 /* Search backwards and mark the jump insn as a non-local goto.
888 Note that this precludes the use of __builtin_longjmp to a
889 __builtin_setjmp target in the same function. However, we've
890 already cautioned the user that these functions are for
891 internal exception handling use only. */
892 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
894 gcc_assert (insn != last);
898 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
/* Stop at an intervening call insn (truncated break not visible).  */
901 else if (CALL_P (insn))
906 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
907 and the address of the save area. */
/* NOTE(review): truncated listing -- return type, braces, `return const0_rtx'
   style returns, and parts of the trailing insn-scan loop are not visible.  */
910 expand_builtin_nonlocal_goto (tree exp)
912 tree t_label, t_save_area;
913 rtx r_label, r_save_area, r_fp, r_sp, insn;
/* Expects exactly (void *label, void *save_area).  */
915 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
918 t_label = CALL_EXPR_ARG (exp, 0);
919 t_save_area = CALL_EXPR_ARG (exp, 1);
921 r_label = expand_normal (t_label);
922 r_label = convert_memory_address (Pmode, r_label);
923 r_save_area = expand_normal (t_save_area);
924 r_save_area = convert_memory_address (Pmode, r_save_area);
925 /* Copy the address of the save location to a register just in case it was based
926 on the frame pointer. */
927 r_save_area = copy_to_reg (r_save_area);
/* Save area layout: FP at word 0, saved SP at word 1.  */
928 r_fp = gen_rtx_MEM (Pmode, r_save_area);
929 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
930 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
932 crtl->has_nonlocal_goto = 1;
934 #ifdef HAVE_nonlocal_goto
935 /* ??? We no longer need to pass the static chain value, afaik. */
936 if (HAVE_nonlocal_goto)
937 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
/* Manual fallback when the target has no nonlocal_goto pattern.  */
941 r_label = copy_to_reg (r_label);
943 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
944 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
946 /* Restore frame pointer for containing function.
947 This sets the actual hard register used for the frame pointer
948 to the location of the function's incoming static chain info.
949 The non-local goto handler will then adjust it to contain the
950 proper value and reload the argument pointer, if needed. */
951 emit_move_insn (hard_frame_pointer_rtx, r_fp);
952 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
954 /* USE of hard_frame_pointer_rtx added for consistency;
955 not clear if really needed. */
956 emit_use (hard_frame_pointer_rtx);
957 emit_use (stack_pointer_rtx);
959 /* If the architecture is using a GP register, we must
960 conservatively assume that the target function makes use of it.
961 The prologue of functions with nonlocal gotos must therefore
962 initialize the GP register to the appropriate value, and we
963 must then make sure that this value is live at the point
964 of the jump. (Note that this doesn't necessarily apply
965 to targets with a nonlocal_goto pattern; they are free
966 to implement it in their own way. Note also that this is
967 a no-op if the GP register is a global invariant.) */
968 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
969 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
970 emit_use (pic_offset_table_rtx);
972 emit_indirect_jump (r_label);
975 /* Search backwards to the jump insn and mark it as a
977 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
981 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
/* Stop at an intervening call insn (truncated break not visible).  */
984 else if (CALL_P (insn))
991 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
992 (not all will be used on all machines) that was passed to __builtin_setjmp.
993 It updates the stack pointer in that block to correspond to the current
/* NOTE(review): truncated listing -- the final words of the comment above
   ("stack pointer."), braces, and the `stack_save' declaration are missing.  */
997 expand_builtin_update_setjmp_buf (rtx buf_addr)
/* Default save-area mode; may be overridden by the target below.  */
999 enum machine_mode sa_mode = Pmode;
1003 #ifdef HAVE_save_stack_nonlocal
1004 if (HAVE_save_stack_nonlocal)
1005 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1007 #ifdef STACK_SAVEAREA_MODE
1008 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* Stack save area lives at word 2 of the setjmp buffer, matching
   expand_builtin_setjmp_setup above.  */
1012 = gen_rtx_MEM (sa_mode,
1015 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1019 emit_insn (gen_setjmp ());
1022 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1025 /* Expand a call to __builtin_prefetch. For a target that does not support
1026 data prefetch, evaluate the memory address argument in case it has side
1030 expand_builtin_prefetch (tree exp)
1032 tree arg0, arg1, arg2;
/* Only the address argument is mandatory; bail out quietly otherwise.  */
1036 if (!validate_arglist (exp, POINTER_TYPE, 0))
1039 arg0 = CALL_EXPR_ARG (exp, 0);
1041 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1042 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1044 nargs = call_expr_nargs (exp);
1046 arg1 = CALL_EXPR_ARG (exp, 1);
1048 arg1 = integer_zero_node;
1050 arg2 = CALL_EXPR_ARG (exp, 2);
1052 arg2 = build_int_cst (NULL_TREE, 3);
1054 /* Argument 0 is an address. */
1055 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1057 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1058 if (TREE_CODE (arg1) != INTEGER_CST)
1060 error ("second argument to %<__builtin_prefetch%> must be a constant");
1061 arg1 = integer_zero_node;
1063 op1 = expand_normal (arg1)
1064 /* Argument 1 must be either zero or one. */
1065 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1067 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1072 /* Argument 2 (locality) must be a compile-time constant int. */
1073 if (TREE_CODE (arg2) != INTEGER_CST)
1075 error ("third argument to %<__builtin_prefetch%> must be a constant");
1076 arg2 = integer_zero_node;
1078 op2 = expand_normal (arg2);
1079 /* Argument 2 must be 0, 1, 2, or 3. */
1080 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1082 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1086 #ifdef HAVE_prefetch
/* Force the address into a form (and Pmode) the prefetch insn accepts.  */
1089 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1091 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1092 || (GET_MODE (op0) != Pmode))
1094 op0 = convert_memory_address (Pmode, op0);
1095 op0 = force_reg (Pmode, op0);
1097 emit_insn (gen_prefetch (op0, op1, op2));
1101 /* Don't do anything with direct references to volatile memory, but
1102 generate code to handle other side effects. */
1103 if (!MEM_P (op0) && side_effects_p (op0))
1107 /* Get a MEM rtx for expression EXP which is the address of an operand
1108 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1109 the maximum length of the block of memory that might be accessed or
1113 get_memory_rtx (tree exp, tree len)
1115 tree orig_exp = exp;
1119 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1120 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1121 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1122 exp = TREE_OPERAND (exp, 0);
/* Expand the (original, unresolved) address and wrap it in a BLKmode MEM.  */
1124 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1125 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1127 /* Get an expression we can use to find the attributes to assign to MEM.
1128 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1129 we can. First remove any nops. */
1130 while (CONVERT_EXPR_P (exp)
1131 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1132 exp = TREE_OPERAND (exp, 0);
/* &obj + CST: remember the constant byte offset OFF and use OBJ itself.  */
1135 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1136 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1137 && host_integerp (TREE_OPERAND (exp, 1), 0)
1138 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1139 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1140 else if (TREE_CODE (exp) == ADDR_EXPR)
1141 exp = TREE_OPERAND (exp, 0);
1142 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1143 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1147 /* Honor attributes derived from exp, except for the alias set
1148 (as builtin stringops may alias with anything) and the size
1149 (as stringops may access multiple array elements). */
1152 set_mem_attributes (mem, exp, 0);
1155 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1157 /* Allow the string and memory builtins to overflow from one
1158 field into another, see http://gcc.gnu.org/PR23561.
1159 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1160 memory accessed by the string or memory builtin will fit
1161 within the field. */
1162 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1164 tree mem_expr = MEM_EXPR (mem);
1165 HOST_WIDE_INT offset = -1, length = -1;
/* Strip wrappers until we reach the innermost COMPONENT_REF.  */
1168 while (TREE_CODE (inner) == ARRAY_REF
1169 || CONVERT_EXPR_P (inner)
1170 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1171 || TREE_CODE (inner) == SAVE_EXPR)
1172 inner = TREE_OPERAND (inner, 0);
1174 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1176 if (MEM_OFFSET (mem)
1177 && CONST_INT_P (MEM_OFFSET (mem)))
1178 offset = INTVAL (MEM_OFFSET (mem));
/* LENGTH stays -1 (unknown) unless LEN is a compile-time constant.  */
1180 if (offset >= 0 && len && host_integerp (len, 0))
1181 length = tree_low_cst (len, 0);
/* Walk outward over nested COMPONENT_REFs; keep MEM_EXPR in sync.  */
1183 while (TREE_CODE (inner) == COMPONENT_REF)
1185 tree field = TREE_OPERAND (inner, 1);
1186 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1187 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1189 /* Bitfields are generally not byte-addressable. */
1190 gcc_assert (!DECL_BIT_FIELD (field)
1191 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1192 % BITS_PER_UNIT) == 0
1193 && host_integerp (DECL_SIZE (field), 0)
1194 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1195 % BITS_PER_UNIT) == 0));
1197 /* If we can prove that the memory starting at XEXP (mem, 0) and
1198 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1199 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1200 fields without DECL_SIZE_UNIT like flexible array members. */
1202 && DECL_SIZE_UNIT (field)
1203 && host_integerp (DECL_SIZE_UNIT (field), 0))
1206 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1209 && offset + length <= size)
1214 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1215 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1216 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1224 mem_expr = TREE_OPERAND (mem_expr, 0);
1225 inner = TREE_OPERAND (inner, 0);
/* No containing field proven large enough: drop or widen MEM_EXPR.  */
1228 if (mem_expr == NULL)
1230 if (mem_expr != MEM_EXPR (mem))
1232 set_mem_expr (mem, mem_expr);
1233 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
/* Stringops may alias anything and touch multiple elements, so clear
   the alias set and the recorded size.  */
1236 set_mem_alias_set (mem, 0);
1237 set_mem_size (mem, NULL_RTX);
1243 /* Built-in functions to perform an untyped call and return. */
1245 /* For each register that may be used for calling a function, this
1246 gives a mode used to copy the register's value. VOIDmode indicates
1247 the register is not used for calling a function. If the machine
1248 has register windows, this gives only the outbound registers.
1249 INCOMING_REGNO gives the corresponding inbound register. */
/* Filled in lazily by apply_args_size () on first use.  */
1250 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1252 /* For each register that may be used for returning values, this gives
1253 a mode used to copy the register's value. VOIDmode indicates the
1254 register is not used for returning values. If the machine has
1255 register windows, this gives only the outbound registers.
1256 INCOMING_REGNO gives the corresponding inbound register. */
/* Filled in lazily by apply_result_size () on first use.  */
1257 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1259 /* Return the size required for the block returned by __builtin_apply_args,
1260 and initialize apply_args_mode. */
1263 apply_args_size (void)
/* -1 marks "not yet computed"; the result is cached across calls.  */
1265 static int size = -1;
1268 enum machine_mode mode;
1270 /* The values computed by this function never change. */
1273 /* The first value is the incoming arg-pointer. */
1274 size = GET_MODE_SIZE (Pmode);
1276 /* The second value is the structure value address unless this is
1277 passed as an "invisible" first argument. */
1278 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1279 size += GET_MODE_SIZE (Pmode);
/* Reserve an aligned slot for every register that can carry arguments,
   recording each register's copy mode in apply_args_mode.  */
1281 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1282 if (FUNCTION_ARG_REGNO_P (regno))
1284 mode = reg_raw_mode[regno];
1286 gcc_assert (mode != VOIDmode);
/* Round SIZE up to this mode's natural alignment before the slot.  */
1288 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1289 if (size % align != 0)
1290 size = CEIL (size, align) * align;
1291 size += GET_MODE_SIZE (mode);
1292 apply_args_mode[regno] = mode;
1296 apply_args_mode[regno] = VOIDmode;
1302 /* Return the size required for the block returned by __builtin_apply,
1303 and initialize apply_result_mode. */
1306 apply_result_size (void)
/* -1 marks "not yet computed"; the result is cached across calls.  */
1308 static int size = -1;
1310 enum machine_mode mode;
1312 /* The values computed by this function never change. */
/* Reserve an aligned slot for every register that can carry a return
   value, recording each register's copy mode in apply_result_mode.  */
1317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1318 if (FUNCTION_VALUE_REGNO_P (regno))
1320 mode = reg_raw_mode[regno];
1322 gcc_assert (mode != VOIDmode);
1324 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1325 if (size % align != 0)
1326 size = CEIL (size, align) * align;
1327 size += GET_MODE_SIZE (mode);
1328 apply_result_mode[regno] = mode;
1331 apply_result_mode[regno] = VOIDmode;
1333 /* Allow targets that use untyped_call and untyped_return to override
1334 the size so that machine-specific information can be stored here. */
1335 #ifdef APPLY_RESULT_SIZE
1336 size = APPLY_RESULT_SIZE;
1342 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1343 /* Create a vector describing the result block RESULT. If SAVEP is true,
1344 the result block is used to save the values; otherwise it is used to
1345 restore the values. */
1348 result_vector (int savep, rtx result)
1350 int regno, size, align, nelts;
1351 enum machine_mode mode;
1353 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
/* Walk the result registers in the same slot layout apply_result_size
   computed, emitting one SET per live register.  */
1356 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1357 if ((mode = apply_result_mode[regno]) != VOIDmode)
1359 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1360 if (size % align != 0)
1361 size = CEIL (size, align) * align;
/* Saving uses the outbound regno; restoring uses the inbound one.  */
1362 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1363 mem = adjust_address (result, mode, size);
1364 savevec[nelts++] = (savep
1365 ? gen_rtx_SET (VOIDmode, mem, reg)
1366 : gen_rtx_SET (VOIDmode, reg, mem));
1367 size += GET_MODE_SIZE (mode);
1369 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1371 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1373 /* Save the state required to perform an untyped call with the same
1374 arguments as were passed to the current function. */
1377 expand_builtin_apply_args_1 (void)
1380 int size, align, regno;
1381 enum machine_mode mode;
1382 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1384 /* Create a block where the arg-pointer, structure value address,
1385 and argument registers can be saved. */
1386 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1388 /* Walk past the arg-pointer and structure value address. */
1389 size = GET_MODE_SIZE (Pmode);
1390 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1391 size += GET_MODE_SIZE (Pmode);
1393 /* Save each register used in calling a function to the block. */
/* Layout must mirror apply_args_size: same alignment rounding, same
   slot order, so __builtin_apply can read the block back.  */
1394 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1395 if ((mode = apply_args_mode[regno]) != VOIDmode)
1397 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1398 if (size % align != 0)
1399 size = CEIL (size, align) * align;
1401 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1403 emit_move_insn (adjust_address (registers, mode, size), tem);
1404 size += GET_MODE_SIZE (mode);
1407 /* Save the arg pointer to the block. */
1408 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1409 #ifdef STACK_GROWS_DOWNWARD
1410 /* We need the pointer as the caller actually passed them to us, not
1411 as we might have pretended they were passed. Make sure it's a valid
1412 operand, as emit_move_insn isn't expected to handle a PLUS. */
1414 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1417 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1419 size = GET_MODE_SIZE (Pmode);
1421 /* Save the structure value address unless this is passed as an
1422 "invisible" first argument. */
1423 if (struct_incoming_value)
1425 emit_move_insn (adjust_address (registers, Pmode, size),
1426 copy_to_reg (struct_incoming_value));
1427 size += GET_MODE_SIZE (Pmode);
1430 /* Return the address of the block. */
1431 return copy_addr_to_reg (XEXP (registers, 0));
1434 /* __builtin_apply_args returns block of memory allocated on
1435 the stack into which is stored the arg pointer, structure
1436 value address, static chain, and all the registers that might
1437 possibly be used in performing a function call. The code is
1438 moved to the start of the function so the incoming values are
1442 expand_builtin_apply_args (void)
1444 /* Don't do __builtin_apply_args more than once in a function.
1445 Save the result of the first call and reuse it. */
1446 if (apply_args_value != 0)
1447 return apply_args_value;
1449 /* When this function is called, it means that registers must be
1450 saved on entry to this function. So we migrate the
1451 call to the first insn of this function. */
1456 temp = expand_builtin_apply_args_1 ();
/* Cache the block address so later uses in this function reuse it.  */
1460 apply_args_value = temp;
1462 /* Put the insns after the NOTE that starts the function.
1463 If this is inside a start_sequence, make the outer-level insn
1464 chain current, so the code is placed at the start of the
1465 function. If internal_arg_pointer is a non-virtual pseudo,
1466 it needs to be placed after the function that initializes
1468 push_topmost_sequence ();
1469 if (REG_P (crtl->args.internal_arg_pointer)
1470 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1471 emit_insn_before (seq, parm_birth_insn)
1473 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1474 pop_topmost_sequence ();
1479 /* Perform an untyped call and save the state required to perform an
1480 untyped return of whatever value was returned by the given function. */
1483 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1485 int size, align, regno;
1486 enum machine_mode mode;
1487 rtx incoming_args, result, reg, dest, src, call_insn;
1488 rtx old_stack_level = 0;
1489 rtx call_fusage = 0;
1490 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1492 arguments = convert_memory_address (Pmode, arguments);
1494 /* Create a block where the return registers can be saved. */
1495 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1497 /* Fetch the arg pointer from the ARGUMENTS block. */
1498 incoming_args = gen_reg_rtx (Pmode);
1499 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1500 #ifndef STACK_GROWS_DOWNWARD
1501 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1502 incoming_args, 0, OPTAB_LIB_WIDEN);
1505 /* Push a new argument block and copy the arguments. Do not allow
1506 the (potential) memcpy call below to interfere with our stack
1508 do_pending_stack_adjust ();
1511 /* Save the stack with nonlocal if available. */
1512 #ifdef HAVE_save_stack_nonlocal
1513 if (HAVE_save_stack_nonlocal)
1514 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1517 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1519 /* Allocate a block of memory onto the stack and copy the memory
1520 arguments to the outgoing arguments address. */
1521 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1523 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1524 may have already set current_function_calls_alloca to true.
1525 current_function_calls_alloca won't be set if argsize is zero,
1526 so we have to guarantee need_drap is true here. */
1527 if (SUPPORTS_STACK_ALIGNMENT)
1528 crtl->need_drap = true;
1530 dest = virtual_outgoing_args_rtx;
1531 #ifndef STACK_GROWS_DOWNWARD
/* On upward-growing stacks the block sits below the outgoing-args
   pointer, so bias DEST by -ARGSIZE.  */
1532 if (CONST_INT_P (argsize))
1533 dest = plus_constant (dest, -INTVAL (argsize));
1535 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1537 dest = gen_rtx_MEM (BLKmode, dest);
1538 set_mem_align (dest, PARM_BOUNDARY);
1539 src = gen_rtx_MEM (BLKmode, incoming_args);
1540 set_mem_align (src, PARM_BOUNDARY);
/* Bulk-copy the saved argument area onto the new argument block.  */
1541 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1543 /* Refer to the argument block. */
1545 arguments = gen_rtx_MEM (BLKmode, arguments);
1546 set_mem_align (arguments, PARM_BOUNDARY);
1548 /* Walk past the arg-pointer and structure value address. */
1549 size = GET_MODE_SIZE (Pmode);
1551 size += GET_MODE_SIZE (Pmode);
1553 /* Restore each of the registers previously saved. Make USE insns
1554 for each of these registers for use in making the call. */
/* Slot layout mirrors apply_args_size / expand_builtin_apply_args_1.  */
1555 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1556 if ((mode = apply_args_mode[regno]) != VOIDmode)
1558 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1559 if (size % align != 0)
1560 size = CEIL (size, align) * align;
1561 reg = gen_rtx_REG (mode, regno);
1562 emit_move_insn (reg, adjust_address (arguments, mode, size));
1563 use_reg (&call_fusage, reg);
1564 size += GET_MODE_SIZE (mode);
1567 /* Restore the structure value address unless this is passed as an
1568 "invisible" first argument. */
1569 size = GET_MODE_SIZE (Pmode);
1572 rtx value = gen_reg_rtx (Pmode);
1573 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1574 emit_move_insn (struct_value, value);
1575 if (REG_P (struct_value))
1576 use_reg (&call_fusage, struct_value);
1577 size += GET_MODE_SIZE (Pmode);
1580 /* All arguments and registers used for the call are set up by now! */
1581 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1583 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1584 and we don't want to load it into a register as an optimization,
1585 because prepare_call_address already did it if it should be done. */
1586 if (GET_CODE (function) != SYMBOL_REF)
1587 function = memory_address (FUNCTION_MODE, function);
1589 /* Generate the actual call instruction and save the return value. */
1590 #ifdef HAVE_untyped_call
1591 if (HAVE_untyped_call)
1592 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1593 result, result_vector (1, result)));
1596 #ifdef HAVE_call_value
1597 if (HAVE_call_value)
1601 /* Locate the unique return register. It is not possible to
1602 express a call that sets more than one return register using
1603 call_value; use untyped_call for that. In fact, untyped_call
1604 only needs to save the return registers in the given block. */
1605 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1606 if ((mode = apply_result_mode[regno]) != VOIDmode)
1608 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1610 valreg = gen_rtx_REG (mode, regno);
1613 emit_call_insn (GEN_CALL_VALUE (valreg,
1614 gen_rtx_MEM (FUNCTION_MODE, function),
1615 const0_rtx, NULL_RTX, const0_rtx));
1617 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1623 /* Find the CALL insn we just emitted, and attach the register usage
1625 call_insn = last_call_insn ();
1626 add_function_usage_to (call_insn, call_fusage);
1628 /* Restore the stack. */
1629 #ifdef HAVE_save_stack_nonlocal
1630 if (HAVE_save_stack_nonlocal)
1631 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1634 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1638 /* Return the address of the result block. */
1639 result = copy_addr_to_reg (XEXP (result, 0));
1640 return convert_memory_address (ptr_mode, result);
1643 /* Perform an untyped return. */
1646 expand_builtin_return (rtx result)
1648 int size, align, regno;
1649 enum machine_mode mode;
1651 rtx call_fusage = 0;
1653 result = convert_memory_address (Pmode, result);
/* Called for its side effect of initializing apply_result_mode.  */
1655 apply_result_size ();
1656 result = gen_rtx_MEM (BLKmode, result);
1658 #ifdef HAVE_untyped_return
1659 if (HAVE_untyped_return)
1661 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1667 /* Restore the return value and note that each value is used. */
/* Fallback path: reload each saved return register from the block,
   using the same slot layout as apply_result_size.  */
1669 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1670 if ((mode = apply_result_mode[regno]) != VOIDmode)
1672 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1673 if (size % align != 0)
1674 size = CEIL (size, align) * align;
1675 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1676 emit_move_insn (reg, adjust_address (result, mode, size));
/* Collect USEs in a separate sequence so they can be emitted just
   before the return below.  */
1678 push_to_sequence (call_fusage);
1680 call_fusage = get_insns ();
1682 size += GET_MODE_SIZE (mode);
1685 /* Put the USE insns before the return. */
1686 emit_insn (call_fusage);
1688 /* Return whatever values was restored by jumping directly to the end
1690 expand_naked_return ();
1693 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
/* Map a tree type code to the __builtin_classify_type type class.  */
1695 static enum type_class
1696 type_to_class (tree type)
1698 switch (TREE_CODE (type))
1700 case VOID_TYPE: return void_type_class;
1701 case INTEGER_TYPE: return integer_type_class;
1702 case ENUMERAL_TYPE: return enumeral_type_class;
1703 case BOOLEAN_TYPE: return boolean_type_class;
1704 case POINTER_TYPE: return pointer_type_class;
1705 case REFERENCE_TYPE: return reference_type_class;
1706 case OFFSET_TYPE: return offset_type_class;
1707 case REAL_TYPE: return real_type_class;
1708 case COMPLEX_TYPE: return complex_type_class;
1709 case FUNCTION_TYPE: return function_type_class;
1710 case METHOD_TYPE: return method_type_class;
1711 case RECORD_TYPE: return record_type_class;
1713 case QUAL_UNION_TYPE: return union_type_class;
/* Arrays of chars (TYPE_STRING_FLAG) classify as strings.  */
1714 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1715 ? string_type_class : array_type_class);
1716 case LANG_TYPE: return lang_type_class;
1717 default: return no_type_class;
1721 /* Expand a call EXP to __builtin_classify_type. */
1724 expand_builtin_classify_type (tree exp)
/* With an argument, classify its type; with none, return no_type_class.  */
1726 if (call_expr_nargs (exp))
1727 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1728 return GEN_INT (no_type_class);
1731 /* This helper macro, meant to be used in mathfn_built_in below,
1732 determines which among a set of three builtin math functions is
1733 appropriate for a given type mode. The `F' and `L' cases are
1734 automatically generated from the `double' case. */
/* Sets the local variables fcode/fcodef/fcodel (double/float/long double
   variants) in the enclosing switch.  */
1735 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1736 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1737 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1738 fcodel = BUILT_IN_MATHFN##L ; break;
1739 /* Similar to above, but appends _R after any F/L suffix. */
1740 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1741 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1742 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1743 fcodel = BUILT_IN_MATHFN##L_R ; break;
1745 /* Return mathematic function equivalent to FN but operating directly
1746 on TYPE, if available. If IMPLICIT is true find the function in
1747 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1748 can't do the conversion, return zero. */
1751 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1753 tree const *const fn_arr
1754 = implicit ? implicit_built_in_decls : built_in_decls;
1755 enum built_in_function fcode, fcodef, fcodel;
/* Each CASE_MATHFN expands to the three precision variants of FN and
   records them in fcode/fcodef/fcodel.  */
1759 CASE_MATHFN (BUILT_IN_ACOS)
1760 CASE_MATHFN (BUILT_IN_ACOSH)
1761 CASE_MATHFN (BUILT_IN_ASIN)
1762 CASE_MATHFN (BUILT_IN_ASINH)
1763 CASE_MATHFN (BUILT_IN_ATAN)
1764 CASE_MATHFN (BUILT_IN_ATAN2)
1765 CASE_MATHFN (BUILT_IN_ATANH)
1766 CASE_MATHFN (BUILT_IN_CBRT)
1767 CASE_MATHFN (BUILT_IN_CEIL)
1768 CASE_MATHFN (BUILT_IN_CEXPI)
1769 CASE_MATHFN (BUILT_IN_COPYSIGN)
1770 CASE_MATHFN (BUILT_IN_COS)
1771 CASE_MATHFN (BUILT_IN_COSH)
1772 CASE_MATHFN (BUILT_IN_DREM)
1773 CASE_MATHFN (BUILT_IN_ERF)
1774 CASE_MATHFN (BUILT_IN_ERFC)
1775 CASE_MATHFN (BUILT_IN_EXP)
1776 CASE_MATHFN (BUILT_IN_EXP10)
1777 CASE_MATHFN (BUILT_IN_EXP2)
1778 CASE_MATHFN (BUILT_IN_EXPM1)
1779 CASE_MATHFN (BUILT_IN_FABS)
1780 CASE_MATHFN (BUILT_IN_FDIM)
1781 CASE_MATHFN (BUILT_IN_FLOOR)
1782 CASE_MATHFN (BUILT_IN_FMA)
1783 CASE_MATHFN (BUILT_IN_FMAX)
1784 CASE_MATHFN (BUILT_IN_FMIN)
1785 CASE_MATHFN (BUILT_IN_FMOD)
1786 CASE_MATHFN (BUILT_IN_FREXP)
1787 CASE_MATHFN (BUILT_IN_GAMMA)
1788 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1789 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1790 CASE_MATHFN (BUILT_IN_HYPOT)
1791 CASE_MATHFN (BUILT_IN_ILOGB)
1792 CASE_MATHFN (BUILT_IN_INF)
1793 CASE_MATHFN (BUILT_IN_ISINF)
1794 CASE_MATHFN (BUILT_IN_J0)
1795 CASE_MATHFN (BUILT_IN_J1)
1796 CASE_MATHFN (BUILT_IN_JN)
1797 CASE_MATHFN (BUILT_IN_LCEIL)
1798 CASE_MATHFN (BUILT_IN_LDEXP)
1799 CASE_MATHFN (BUILT_IN_LFLOOR)
1800 CASE_MATHFN (BUILT_IN_LGAMMA)
1801 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1802 CASE_MATHFN (BUILT_IN_LLCEIL)
1803 CASE_MATHFN (BUILT_IN_LLFLOOR)
1804 CASE_MATHFN (BUILT_IN_LLRINT)
1805 CASE_MATHFN (BUILT_IN_LLROUND)
1806 CASE_MATHFN (BUILT_IN_LOG)
1807 CASE_MATHFN (BUILT_IN_LOG10)
1808 CASE_MATHFN (BUILT_IN_LOG1P)
1809 CASE_MATHFN (BUILT_IN_LOG2)
1810 CASE_MATHFN (BUILT_IN_LOGB)
1811 CASE_MATHFN (BUILT_IN_LRINT)
1812 CASE_MATHFN (BUILT_IN_LROUND)
1813 CASE_MATHFN (BUILT_IN_MODF)
1814 CASE_MATHFN (BUILT_IN_NAN)
1815 CASE_MATHFN (BUILT_IN_NANS)
1816 CASE_MATHFN (BUILT_IN_NEARBYINT)
1817 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1818 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1819 CASE_MATHFN (BUILT_IN_POW)
1820 CASE_MATHFN (BUILT_IN_POWI)
1821 CASE_MATHFN (BUILT_IN_POW10)
1822 CASE_MATHFN (BUILT_IN_REMAINDER)
1823 CASE_MATHFN (BUILT_IN_REMQUO)
1824 CASE_MATHFN (BUILT_IN_RINT)
1825 CASE_MATHFN (BUILT_IN_ROUND)
1826 CASE_MATHFN (BUILT_IN_SCALB)
1827 CASE_MATHFN (BUILT_IN_SCALBLN)
1828 CASE_MATHFN (BUILT_IN_SCALBN)
1829 CASE_MATHFN (BUILT_IN_SIGNBIT)
1830 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1831 CASE_MATHFN (BUILT_IN_SIN)
1832 CASE_MATHFN (BUILT_IN_SINCOS)
1833 CASE_MATHFN (BUILT_IN_SINH)
1834 CASE_MATHFN (BUILT_IN_SQRT)
1835 CASE_MATHFN (BUILT_IN_TAN)
1836 CASE_MATHFN (BUILT_IN_TANH)
1837 CASE_MATHFN (BUILT_IN_TGAMMA)
1838 CASE_MATHFN (BUILT_IN_TRUNC)
1839 CASE_MATHFN (BUILT_IN_Y0)
1840 CASE_MATHFN (BUILT_IN_Y1)
1841 CASE_MATHFN (BUILT_IN_YN)
/* Pick the variant whose operand type matches TYPE's main variant.  */
1847 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1848 return fn_arr[fcode];
1849 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1850 return fn_arr[fcodef];
1851 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1852 return fn_arr[fcodel];
1857 /* Like mathfn_built_in_1(), but always use the implicit array. */
1860 mathfn_built_in (tree type, enum built_in_function fn)
1862 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1865 /* If errno must be maintained, expand the RTL to check if the result,
1866 TARGET, of a built-in function call, EXP, is NaN, and if so set
1870 expand_errno_check (tree exp, rtx target)
1872 rtx lab = gen_label_rtx ();
1874 /* Test the result; if it is NaN, set errno=EDOM because
1875 the argument was not in the domain. */
/* NaN is the only value that compares unequal to itself, so EQ on
   TARGET vs. itself jumps past the errno code for non-NaN results.  */
1876 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1877 NULL_RTX, NULL_RTX, lab);
1880 /* If this built-in doesn't throw an exception, set errno directly. */
1881 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1883 #ifdef GEN_ERRNO_RTX
/* Target provides its own rtx for errno; otherwise fall back to a
   plain symbol reference to "errno".  */
1884 rtx errno_rtx = GEN_ERRNO_RTX;
1887 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1889 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1895 /* Make sure the library call isn't expanded as a tail call. */
1896 CALL_EXPR_TAILCALL (exp) = 0;
1898 /* We can't set errno=EDOM directly; let the library call do it.
1899 Pop the arguments right away in case the call gets deleted. */
1901 expand_call (exp, target, 0);
1906 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1907 Return NULL_RTX if a normal call should be emitted rather than expanding
1908 the function in-line. EXP is the expression that is a call to the builtin
1909 function; if convenient, the result should be placed in TARGET.
1910 SUBTARGET may be used as the target for computing one of EXP's operands. */
1913 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1915 optab builtin_optab;
1917 tree fndecl = get_callee_fndecl (exp);
1918 enum machine_mode mode;
1919 bool errno_set = false;
1922 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1925 arg = CALL_EXPR_ARG (exp, 0);
/* Select the optab for this builtin and note whether it can set errno.  */
1927 switch (DECL_FUNCTION_CODE (fndecl))
1929 CASE_FLT_FN (BUILT_IN_SQRT):
/* sqrt only sets EDOM for negative arguments.  */
1930 errno_set = ! tree_expr_nonnegative_p (arg);
1931 builtin_optab = sqrt_optab;
1933 CASE_FLT_FN (BUILT_IN_EXP):
1934 errno_set = true; builtin_optab = exp_optab; break;
1935 CASE_FLT_FN (BUILT_IN_EXP10):
1936 CASE_FLT_FN (BUILT_IN_POW10):
1937 errno_set = true; builtin_optab = exp10_optab; break;
1938 CASE_FLT_FN (BUILT_IN_EXP2):
1939 errno_set = true; builtin_optab = exp2_optab; break;
1940 CASE_FLT_FN (BUILT_IN_EXPM1):
1941 errno_set = true; builtin_optab = expm1_optab; break;
1942 CASE_FLT_FN (BUILT_IN_LOGB):
1943 errno_set = true; builtin_optab = logb_optab; break;
1944 CASE_FLT_FN (BUILT_IN_LOG):
1945 errno_set = true; builtin_optab = log_optab; break;
1946 CASE_FLT_FN (BUILT_IN_LOG10):
1947 errno_set = true; builtin_optab = log10_optab; break;
1948 CASE_FLT_FN (BUILT_IN_LOG2):
1949 errno_set = true; builtin_optab = log2_optab; break;
1950 CASE_FLT_FN (BUILT_IN_LOG1P):
1951 errno_set = true; builtin_optab = log1p_optab; break;
1952 CASE_FLT_FN (BUILT_IN_ASIN):
1953 builtin_optab = asin_optab; break;
1954 CASE_FLT_FN (BUILT_IN_ACOS):
1955 builtin_optab = acos_optab; break;
1956 CASE_FLT_FN (BUILT_IN_TAN):
1957 builtin_optab = tan_optab; break;
1958 CASE_FLT_FN (BUILT_IN_ATAN):
1959 builtin_optab = atan_optab; break;
1960 CASE_FLT_FN (BUILT_IN_FLOOR):
1961 builtin_optab = floor_optab; break;
1962 CASE_FLT_FN (BUILT_IN_CEIL):
1963 builtin_optab = ceil_optab; break;
1964 CASE_FLT_FN (BUILT_IN_TRUNC):
1965 builtin_optab = btrunc_optab; break;
1966 CASE_FLT_FN (BUILT_IN_ROUND):
1967 builtin_optab = round_optab; break;
1968 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1969 builtin_optab = nearbyint_optab;
1970 if (flag_trapping_math)
1972 /* Else fallthrough and expand as rint. */
1973 CASE_FLT_FN (BUILT_IN_RINT):
1974 builtin_optab = rint_optab; break;
1975 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1976 builtin_optab = significand_optab; break;
1981 /* Make a suitable register to place result in. */
1982 mode = TYPE_MODE (TREE_TYPE (exp));
/* With -fno-math-errno or no NaN support, skip the errno check.  */
1984 if (! flag_errno_math || ! HONOR_NANS (mode))
1987 /* Before working hard, check whether the instruction is available. */
1988 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1990 target = gen_reg_rtx (mode);
1992 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1993 need to expand the argument again. This way, we will not perform
1994 side-effects more the once. */
1995 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1997 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2001 /* Compute into TARGET.
2002 Set TARGET to wherever the result comes back. */
2003 target = expand_unop (mode, builtin_optab, op0, target, 0);
2008 expand_errno_check (exp, target);
2010 /* Output the entire sequence. */
2011 insns = get_insns ();
2017 /* If we were unable to expand via the builtin, stop the sequence
2018 (without outputting the insns) and call to the library function
2019 with the stabilized argument list. */
2023 return expand_call (exp, target, target == const0_rtx);
2026 /* Expand a call to the builtin binary math functions (pow and atan2).
2027 Return NULL_RTX if a normal call should be emitted rather than expanding the
2028 function in-line. EXP is the expression that is a call to the builtin
2029 function; if convenient, the result should be placed in TARGET.
2030 SUBTARGET may be used as the target for computing one of EXP's
2034 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2036 optab builtin_optab;
2037 rtx op0, op1, insns;
2038 int op1_type = REAL_TYPE;
2039 tree fndecl = get_callee_fndecl (exp);
2041 enum machine_mode mode;
2042 bool errno_set = true;
2044 switch (DECL_FUNCTION_CODE (fndecl))
/* scalbn/scalbln/ldexp take an integer second argument, so adjust
   the expected type before validating the argument list.  */
2046 CASE_FLT_FN (BUILT_IN_SCALBN):
2047 CASE_FLT_FN (BUILT_IN_SCALBLN):
2048 CASE_FLT_FN (BUILT_IN_LDEXP):
2049 op1_type = INTEGER_TYPE;
2054 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2057 arg0 = CALL_EXPR_ARG (exp, 0);
2058 arg1 = CALL_EXPR_ARG (exp, 1);
2060 switch (DECL_FUNCTION_CODE (fndecl))
2062 CASE_FLT_FN (BUILT_IN_POW):
2063 builtin_optab = pow_optab; break;
2064 CASE_FLT_FN (BUILT_IN_ATAN2):
2065 builtin_optab = atan2_optab; break;
2066 CASE_FLT_FN (BUILT_IN_SCALB):
/* scalb only matches the optab for radix-2 floating-point formats.  */
2067 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2069 builtin_optab = scalb_optab; break;
2070 CASE_FLT_FN (BUILT_IN_SCALBN):
2071 CASE_FLT_FN (BUILT_IN_SCALBLN):
2072 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2074 /* Fall through... */
2075 CASE_FLT_FN (BUILT_IN_LDEXP):
2076 builtin_optab = ldexp_optab; break;
2077 CASE_FLT_FN (BUILT_IN_FMOD):
2078 builtin_optab = fmod_optab; break;
2079 CASE_FLT_FN (BUILT_IN_REMAINDER):
2080 CASE_FLT_FN (BUILT_IN_DREM):
2081 builtin_optab = remainder_optab; break;
2086 /* Make a suitable register to place result in. */
2087 mode = TYPE_MODE (TREE_TYPE (exp));
2089 /* Before working hard, check whether the instruction is available. */
2090 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2093 target = gen_reg_rtx (mode);
2095 if (! flag_errno_math || ! HONOR_NANS (mode))
2098 /* Always stabilize the argument list.  The arguments are reused if
   the optab expansion fails and we fall back to a library call.  */
2099 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2100 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2102 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2103 op1 = expand_normal (arg1);
2107 /* Compute into TARGET.
2108 Set TARGET to wherever the result comes back. */
2109 target = expand_binop (mode, builtin_optab, op0, op1,
2110 target, 0, OPTAB_DIRECT);
2112 /* If we were unable to expand via the builtin, stop the sequence
2113 (without outputting the insns) and call to the library function
2114 with the stabilized argument list. */
2118 return expand_call (exp, target, target == const0_rtx);
2122 expand_errno_check (exp, target);
2124 /* Output the entire sequence. */
2125 insns = get_insns ();
2132 /* Expand a call to the builtin sin and cos math functions.
2133 Return NULL_RTX if a normal call should be emitted rather than expanding the
2134 function in-line. EXP is the expression that is a call to the builtin
2135 function; if convenient, the result should be placed in TARGET.
2136 SUBTARGET may be used as the target for computing one of EXP's
2140 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2142 optab builtin_optab;
2144 tree fndecl = get_callee_fndecl (exp);
2145 enum machine_mode mode;
2148 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2151 arg = CALL_EXPR_ARG (exp, 0);
2153 switch (DECL_FUNCTION_CODE (fndecl))
2155 CASE_FLT_FN (BUILT_IN_SIN):
2156 CASE_FLT_FN (BUILT_IN_COS):
/* Prefer the combined sincos instruction when the target has one.  */
2157 builtin_optab = sincos_optab; break;
2162 /* Make a suitable register to place result in. */
2163 mode = TYPE_MODE (TREE_TYPE (exp));
2165 /* Check if sincos insn is available, otherwise fallback
2166 to sin or cos insn. */
2167 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2168 switch (DECL_FUNCTION_CODE (fndecl))
2170 CASE_FLT_FN (BUILT_IN_SIN):
2171 builtin_optab = sin_optab; break;
2172 CASE_FLT_FN (BUILT_IN_COS):
2173 builtin_optab = cos_optab; break;
2178 /* Before working hard, check whether the instruction is available. */
2179 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2181 target = gen_reg_rtx (mode);
2183 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2184 need to expand the argument again. This way, we will not perform
2185 side-effects more than once. */
2186 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2188 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2192 /* Compute into TARGET.
2193 Set TARGET to wherever the result comes back. */
2194 if (builtin_optab == sincos_optab)
2198 switch (DECL_FUNCTION_CODE (fndecl))
/* sincos produces both values; request only the one we need by
   passing 0 for the unused result slot.  */
2200 CASE_FLT_FN (BUILT_IN_SIN):
2201 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2203 CASE_FLT_FN (BUILT_IN_COS):
2204 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2209 gcc_assert (result);
2213 target = expand_unop (mode, builtin_optab, op0, target, 0);
2218 /* Output the entire sequence. */
2219 insns = get_insns ();
2225 /* If we were unable to expand via the builtin, stop the sequence
2226 (without outputting the insns) and call to the library function
2227 with the stabilized argument list. */
2231 target = expand_call (exp, target, target == const0_rtx);
2236 /* Given an interclass math builtin decl FNDECL and its argument ARG
2237 return an RTL instruction code that implements the functionality.
2238 If that isn't possible or available return CODE_FOR_nothing. */
2240 static enum insn_code
2241 interclass_mathfn_icode (tree arg, tree fndecl)
2243 bool errno_set = false;
2244 optab builtin_optab = 0;
2245 enum machine_mode mode;
2247 switch (DECL_FUNCTION_CODE (fndecl))
2249 CASE_FLT_FN (BUILT_IN_ILOGB):
/* ilogb may set errno, so note that before deciding to expand inline.  */
2250 errno_set = true; builtin_optab = ilogb_optab; break;
2251 CASE_FLT_FN (BUILT_IN_ISINF):
2252 builtin_optab = isinf_optab; break;
2253 case BUILT_IN_ISNORMAL:
2254 case BUILT_IN_ISFINITE:
2255 CASE_FLT_FN (BUILT_IN_FINITE):
2256 case BUILT_IN_FINITED32:
2257 case BUILT_IN_FINITED64:
2258 case BUILT_IN_FINITED128:
2259 case BUILT_IN_ISINFD32:
2260 case BUILT_IN_ISINFD64:
2261 case BUILT_IN_ISINFD128:
2262 /* These builtins have no optabs (yet). */
2268 /* There's no easy way to detect the case we need to set EDOM. */
2269 if (flag_errno_math && errno_set)
2270 return CODE_FOR_nothing;
2272 /* Optab mode depends on the mode of the input argument. */
2273 mode = TYPE_MODE (TREE_TYPE (arg));
2276 return optab_handler (builtin_optab, mode)->insn_code;
2277 return CODE_FOR_nothing;
2280 /* Expand a call to one of the builtin math functions that operate on
2281 floating point argument and output an integer result (ilogb, isinf,
2283 Return 0 if a normal call should be emitted rather than expanding the
2284 function in-line. EXP is the expression that is a call to the builtin
2285 function; if convenient, the result should be placed in TARGET.
2286 SUBTARGET may be used as the target for computing one of EXP's operands. */
2289 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2291 enum insn_code icode = CODE_FOR_nothing;
2293 tree fndecl = get_callee_fndecl (exp);
2294 enum machine_mode mode;
2297 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2300 arg = CALL_EXPR_ARG (exp, 0);
2301 icode = interclass_mathfn_icode (arg, fndecl);
/* The insn's mode follows the floating-point argument, not the
   integer result.  */
2302 mode = TYPE_MODE (TREE_TYPE (arg));
2304 if (icode != CODE_FOR_nothing)
2306 /* Make a suitable register to place result in. */
2308 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2309 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2311 gcc_assert (insn_data[icode].operand[0].predicate
2312 (target, GET_MODE (target)));
2314 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2315 need to expand the argument again. This way, we will not perform
2316 side-effects more than once. */
2317 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2319 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2321 if (mode != GET_MODE (op0))
2322 op0 = convert_to_mode (mode, op0, 0);
2324 /* Compute into TARGET.
2325 Set TARGET to wherever the result comes back. */
2326 emit_unop_insn (icode, target, op0, UNKNOWN);
2333 /* Expand a call to the builtin sincos math function.
2334 Return NULL_RTX if a normal call should be emitted rather than expanding the
2335 function in-line. EXP is the expression that is a call to the builtin
2339 expand_builtin_sincos (tree exp)
2341 rtx op0, op1, op2, target1, target2;
2342 enum machine_mode mode;
2343 tree arg, sinp, cosp;
2345 location_t loc = EXPR_LOCATION (exp);
2347 if (!validate_arglist (exp, REAL_TYPE,
2348 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2351 arg = CALL_EXPR_ARG (exp, 0);
2352 sinp = CALL_EXPR_ARG (exp, 1);
2353 cosp = CALL_EXPR_ARG (exp, 2);
2355 /* Make a suitable register to place result in. */
2356 mode = TYPE_MODE (TREE_TYPE (arg));
2358 /* Check if sincos insn is available, otherwise emit the call. */
2359 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2362 target1 = gen_reg_rtx (mode);
2363 target2 = gen_reg_rtx (mode);
2365 op0 = expand_normal (arg);
/* Expand *sinp and *cosp as the memory destinations for the results.  */
2366 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2367 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2369 /* Compute into target1 and target2.
2370 Set TARGET to wherever the result comes back. */
2371 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2372 gcc_assert (result);
2374 /* Move target1 and target2 to the memory locations indicated
2376 emit_move_insn (op1, target1);
2377 emit_move_insn (op2, target2);
2382 /* Expand a call to the internal cexpi builtin to the sincos math function.
2383 EXP is the expression that is a call to the builtin function; if convenient,
2384 the result should be placed in TARGET. SUBTARGET may be used as the target
2385 for computing one of EXP's operands. */
2388 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2390 tree fndecl = get_callee_fndecl (exp);
2392 enum machine_mode mode;
2394 location_t loc = EXPR_LOCATION (exp);
2396 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2399 arg = CALL_EXPR_ARG (exp, 0);
2400 type = TREE_TYPE (arg);
2401 mode = TYPE_MODE (TREE_TYPE (arg));
2403 /* Try expanding via a sincos optab, fall back to emitting a libcall
2404 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2405 is only generated from sincos, cexp or if we have either of them. */
2406 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2408 op1 = gen_reg_rtx (mode);
2409 op2 = gen_reg_rtx (mode);
2411 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2413 /* Compute into op1 and op2. */
2414 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2416 else if (TARGET_HAS_SINCOS)
2418 tree call, fn = NULL_TREE;
/* Pick the sincos variant matching this cexpi's floating-point kind.  */
2422 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2423 fn = built_in_decls[BUILT_IN_SINCOSF];
2424 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2425 fn = built_in_decls[BUILT_IN_SINCOS];
2426 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2427 fn = built_in_decls[BUILT_IN_SINCOSL];
/* Build stack temporaries and pass their addresses to sincos.  */
2431 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2432 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2433 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2434 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2435 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2436 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2438 /* Make sure not to fold the sincos call again. */
2439 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2440 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2441 call, 3, arg, top1, top2));
2445 tree call, fn = NULL_TREE, narg;
2446 tree ctype = build_complex_type (type);
2448 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2449 fn = built_in_decls[BUILT_IN_CEXPF];
2450 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2451 fn = built_in_decls[BUILT_IN_CEXP];
2452 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2453 fn = built_in_decls[BUILT_IN_CEXPL];
2457 /* If we don't have a decl for cexp create one. This is the
2458 friendliest fallback if the user calls __builtin_cexpi
2459 without full target C99 function support. */
2460 if (fn == NULL_TREE)
2463 const char *name = NULL;
2465 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2467 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2469 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2472 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2473 fn = build_fn_decl (name, fntype);
/* cexpi (arg) == cexp (0 + arg*i): build the pure-imaginary argument.  */
2476 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2477 build_real (type, dconst0), arg);
2479 /* Make sure not to fold the cexp call again. */
2480 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2481 return expand_expr (build_call_nary (ctype, call, 1, narg),
2482 target, VOIDmode, EXPAND_NORMAL);
2485 /* Now build the proper return type. */
2486 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2487 make_tree (TREE_TYPE (arg), op2),
2488 make_tree (TREE_TYPE (arg), op1)),
2489 target, VOIDmode, EXPAND_NORMAL);
2492 /* Conveniently construct a function call expression. FNDECL names the
2493 function to be called, N is the number of arguments, and the "..."
2494 parameters are the argument expressions. Unlike build_call_expr
2495 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2498 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2501 tree fntype = TREE_TYPE (fndecl);
2502 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2505 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2507 SET_EXPR_LOCATION (fn, loc);
/* Convenience wrapper for call sites that have no useful location.  */
2510 #define build_call_nofold(...) \
2511 build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
2513 /* Expand a call to one of the builtin rounding functions gcc defines
2514 as an extension (lfloor and lceil). As these are gcc extensions we
2515 do not need to worry about setting errno to EDOM.
2516 If expanding via optab fails, lower expression to (int)(floor(x)).
2517 EXP is the expression that is a call to the builtin function;
2518 if convenient, the result should be placed in TARGET. */
2521 expand_builtin_int_roundingfn (tree exp, rtx target)
2523 convert_optab builtin_optab;
2524 rtx op0, insns, tmp;
2525 tree fndecl = get_callee_fndecl (exp);
2526 enum built_in_function fallback_fn;
2527 tree fallback_fndecl;
2528 enum machine_mode mode;
2531 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2534 arg = CALL_EXPR_ARG (exp, 0);
2536 switch (DECL_FUNCTION_CODE (fndecl))
2538 CASE_FLT_FN (BUILT_IN_LCEIL):
2539 CASE_FLT_FN (BUILT_IN_LLCEIL):
2540 builtin_optab = lceil_optab;
2541 fallback_fn = BUILT_IN_CEIL;
2544 CASE_FLT_FN (BUILT_IN_LFLOOR):
2545 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2546 builtin_optab = lfloor_optab;
2547 fallback_fn = BUILT_IN_FLOOR;
2554 /* Make a suitable register to place result in. */
2555 mode = TYPE_MODE (TREE_TYPE (exp));
2557 target = gen_reg_rtx (mode);
2559 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2560 need to expand the argument again. This way, we will not perform
2561 side-effects more than once. */
2562 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2564 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2568 /* Compute into TARGET. */
2569 if (expand_sfix_optab (target, op0, builtin_optab))
2571 /* Output the entire sequence. */
2572 insns = get_insns ();
2578 /* If we were unable to expand via the builtin, stop the sequence
2579 (without outputting the insns). */
2582 /* Fall back to floating point rounding optab. */
2583 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2585 /* For non-C99 targets we may end up without a fallback fndecl here
2586 if the user called __builtin_lfloor directly. In this case emit
2587 a call to the floor/ceil variants nevertheless. This should result
2588 in the best user experience for not full C99 targets. */
2589 if (fallback_fndecl == NULL_TREE)
2592 const char *name = NULL;
/* Map the l*/ll* builtin back to the name of the plain floating-point
   rounding function of the matching precision.  */
2594 switch (DECL_FUNCTION_CODE (fndecl))
2596 case BUILT_IN_LCEIL:
2597 case BUILT_IN_LLCEIL:
2600 case BUILT_IN_LCEILF:
2601 case BUILT_IN_LLCEILF:
2604 case BUILT_IN_LCEILL:
2605 case BUILT_IN_LLCEILL:
2608 case BUILT_IN_LFLOOR:
2609 case BUILT_IN_LLFLOOR:
2612 case BUILT_IN_LFLOORF:
2613 case BUILT_IN_LLFLOORF:
2616 case BUILT_IN_LFLOORL:
2617 case BUILT_IN_LLFLOORL:
2624 fntype = build_function_type_list (TREE_TYPE (arg),
2625 TREE_TYPE (arg), NULL_TREE);
2626 fallback_fndecl = build_fn_decl (name, fntype);
2629 exp = build_call_nofold (fallback_fndecl, 1, arg);
2631 tmp = expand_normal (exp);
2633 /* Truncate the result of floating point optab to integer
2634 via expand_fix (). */
2635 target = gen_reg_rtx (mode);
2636 expand_fix (target, tmp, 0);
2641 /* Expand a call to one of the builtin math functions doing integer
2643 Return 0 if a normal call should be emitted rather than expanding the
2644 function in-line. EXP is the expression that is a call to the builtin
2645 function; if convenient, the result should be placed in TARGET. */
2648 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2650 convert_optab builtin_optab;
2652 tree fndecl = get_callee_fndecl (exp);
2654 enum machine_mode mode;
2656 /* There's no easy way to detect the case we need to set EDOM. */
2657 if (flag_errno_math)
2660 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2663 arg = CALL_EXPR_ARG (exp, 0);
2665 switch (DECL_FUNCTION_CODE (fndecl))
2667 CASE_FLT_FN (BUILT_IN_LRINT):
2668 CASE_FLT_FN (BUILT_IN_LLRINT):
2669 builtin_optab = lrint_optab; break;
2670 CASE_FLT_FN (BUILT_IN_LROUND):
2671 CASE_FLT_FN (BUILT_IN_LLROUND):
2672 builtin_optab = lround_optab; break;
2677 /* Make a suitable register to place result in. */
2678 mode = TYPE_MODE (TREE_TYPE (exp));
2680 target = gen_reg_rtx (mode);
2682 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2683 need to expand the argument again. This way, we will not perform
2684 side-effects more than once. */
2685 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2687 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2691 if (expand_sfix_optab (target, op0, builtin_optab))
2693 /* Output the entire sequence. */
2694 insns = get_insns ();
2700 /* If we were unable to expand via the builtin, stop the sequence
2701 (without outputting the insns) and call to the library function
2702 with the stabilized argument list. */
2705 target = expand_call (exp, target, target == const0_rtx);
2710 /* To evaluate powi(x,n), the floating point value x raised to the
2711 constant integer exponent n, we use a hybrid algorithm that
2712 combines the "window method" with look-up tables. For an
2713 introduction to exponentiation algorithms and "addition chains",
2714 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2715 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2716 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2717 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2719 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2720 multiplications to inline before calling the system library's pow
2721 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2722 so this default never requires calling pow, powf or powl. */
2724 #ifndef POWI_MAX_MULTS
2725 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2728 /* The size of the "optimal power tree" lookup table. All
2729 exponents less than this value are simply looked up in the
2730 powi_table below. This threshold is also used to size the
2731 cache of pseudo registers that hold intermediate results. */
2732 #define POWI_TABLE_SIZE 256
2734 /* The size, in bits of the window, used in the "window method"
2735 exponentiation algorithm. This is equivalent to a radix of
2736 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2737 #define POWI_WINDOW_SIZE 3
2739 /* The following table is an efficient representation of an
2740 "optimal power tree". For each value, i, the corresponding
2741 value, j, in the table states that an optimal evaluation
2742 sequence for calculating pow(x,i) can be found by evaluating
2743 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2744 100 integers is given in Knuth's "Seminumerical algorithms". */
2746 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2748 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2749 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2750 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2751 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2752 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2753 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2754 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2755 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2756 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2757 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2758 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2759 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2760 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2761 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2762 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2763 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2764 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2765 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2766 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2767 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2768 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2769 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2770 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2771 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2772 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2773 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2774 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2775 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2776 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2777 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2778 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2779 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2783 /* Return the number of multiplications required to calculate
2784 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2785 subroutine of powi_cost. CACHE is an array indicating
2786 which exponents have already been calculated. */
2789 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2791 /* If we've already calculated this exponent, then this evaluation
2792 doesn't require any additional multiplications. */
/* Recurse on the optimal split from powi_table; the +1 counts the
   multiplication that combines the two partial results.  */
2797 return powi_lookup_cost (n - powi_table[n], cache)
2798 + powi_lookup_cost (powi_table[n], cache) + 1;
2801 /* Return the number of multiplications required to calculate
2802 powi(x,n) for an arbitrary x, given the exponent N. This
2803 function needs to be kept in sync with expand_powi below. */
2806 powi_cost (HOST_WIDE_INT n)
2808 bool cache[POWI_TABLE_SIZE];
2809 unsigned HOST_WIDE_INT digit;
2810 unsigned HOST_WIDE_INT val;
2816 /* Ignore the reciprocal when calculating the cost. */
2817 val = (n < 0) ? -n : n;
2819 /* Initialize the exponent cache. */
2820 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
/* Consume the exponent one POWI_WINDOW_SIZE-bit window at a time
   until the remainder fits in the lookup table.  */
2825 while (val >= POWI_TABLE_SIZE)
2829 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2830 result += powi_lookup_cost (digit, cache)
2831 + POWI_WINDOW_SIZE + 1;
2832 val >>= POWI_WINDOW_SIZE;
2841 return result + powi_lookup_cost (val, cache);
2844 /* Recursive subroutine of expand_powi. This function takes the array,
2845 CACHE, of already calculated exponents and an exponent N and returns
2846 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2849 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2851 unsigned HOST_WIDE_INT digit;
2855 if (n < POWI_TABLE_SIZE)
2860 target = gen_reg_rtx (mode);
/* Split per the optimal-power-tree table: x**n = x**(n-j) * x**j.  */
2863 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2864 op1 = expand_powi_1 (mode, powi_table[n], cache);
/* Window method: peel off the low POWI_WINDOW_SIZE bits of N.  */
2868 target = gen_reg_rtx (mode);
2869 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2870 op0 = expand_powi_1 (mode, n - digit, cache);
2871 op1 = expand_powi_1 (mode, digit, cache);
2875 target = gen_reg_rtx (mode);
2876 op0 = expand_powi_1 (mode, n >> 1, cache);
2880 result = expand_mult (mode, op0, op1, target, 0);
2881 if (result != target)
2882 emit_move_insn (target, result);
2886 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2887 floating point operand in mode MODE, and N is the exponent. This
2888 function needs to be kept in sync with powi_cost above. */
2891 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2893 rtx cache[POWI_TABLE_SIZE];
2897 return CONST1_RTX (mode);
2899 memset (cache, 0, sizeof (cache));
/* Compute x**|n| first, then reciprocate below if N was negative.  */
2902 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2904 /* If the original exponent was negative, reciprocate the result. */
2906 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2907 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2912 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2913 a normal call should be emitted rather than expanding the function
2914 in-line. EXP is the expression that is a call to the builtin
2915 function; if convenient, the result should be placed in TARGET. */
2918 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2922 tree type = TREE_TYPE (exp);
2923 REAL_VALUE_TYPE cint, c, c2;
2926 enum machine_mode mode = TYPE_MODE (type);
2928 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2931 arg0 = CALL_EXPR_ARG (exp, 0);
2932 arg1 = CALL_EXPR_ARG (exp, 1);
/* Non-constant exponents go straight to the generic binary expander.  */
2934 if (TREE_CODE (arg1) != REAL_CST
2935 || TREE_OVERFLOW (arg1))
2936 return expand_builtin_mathfn_2 (exp, target, subtarget);
2938 /* Handle constant exponents. */
2940 /* For integer valued exponents we can expand to an optimal multiplication
2941 sequence using expand_powi. */
2942 c = TREE_REAL_CST (arg1);
2943 n = real_to_integer (&c);
2944 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2945 if (real_identical (&c, &cint)
2946 && ((n >= -1 && n <= 2)
2947 || (flag_unsafe_math_optimizations
2948 && optimize_insn_for_speed_p ()
2949 && powi_cost (n) <= POWI_MAX_MULTS)))
2951 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2954 op = force_reg (mode, op);
2955 op = expand_powi (op, mode, n);
2960 narg0 = builtin_save_expr (arg0);
2962 /* If the exponent is not integer valued, check if it is half of an integer.
2963 In this case we can expand to sqrt (x) * x**(n/2). */
2964 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2965 if (fn != NULL_TREE)
2967 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2968 n = real_to_integer (&c2)
2969 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2970 if (real_identical (&c2, &cint)
2971 && ((flag_unsafe_math_optimizations
2972 && optimize_insn_for_speed_p ()
2973 && powi_cost (n/2) <= POWI_MAX_MULTS)
2976 tree call_expr = build_call_nofold (fn, 1, narg0);
2977 /* Use expand_expr in case the newly built call expression
2978 was folded to a non-call. */
2979 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2982 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2983 op2 = force_reg (mode, op2);
2984 op2 = expand_powi (op2, mode, abs (n / 2));
2985 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2986 0, OPTAB_LIB_WIDEN);
2987 /* If the original exponent was negative, reciprocate the
2990 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2991 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2997 /* Try if the exponent is a third of an integer. In this case
2998 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2999 different from pow (x, 1./3.) due to rounding and behavior
3000 with negative x we need to constrain this transformation to
3001 unsafe math and positive x or finite math. */
3002 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3004 && flag_unsafe_math_optimizations
3005 && (tree_expr_nonnegative_p (arg0)
3006 || !HONOR_NANS (mode)))
3008 REAL_VALUE_TYPE dconst3;
/* Round c*3 and divide back by 3 to check whether the exponent
   really is a third of an integer.  */
3009 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3010 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3011 real_round (&c2, mode, &c2);
3012 n = real_to_integer (&c2);
3013 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3014 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3015 real_convert (&c2, mode, &c2);
3016 if (real_identical (&c2, &c)
3017 && ((optimize_insn_for_speed_p ()
3018 && powi_cost (n/3) <= POWI_MAX_MULTS)
3021 tree call_expr = build_call_nofold (fn, 1,narg0);
3022 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3023 if (abs (n) % 3 == 2)
3024 op = expand_simple_binop (mode, MULT, op, op, op,
3025 0, OPTAB_LIB_WIDEN);
3028 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3029 op2 = force_reg (mode, op2);
3030 op2 = expand_powi (op2, mode, abs (n / 3));
3031 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3032 0, OPTAB_LIB_WIDEN);
3033 /* If the original exponent was negative, reciprocate the
3036 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3037 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3043 /* Fall back to optab expansion. */
3044 return expand_builtin_mathfn_2 (exp, target, subtarget);
3047 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3048 a normal call should be emitted rather than expanding the function
3049 in-line. EXP is the expression that is a call to the builtin
3050 function; if convenient, the result should be placed in TARGET. */
3053 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3057 enum machine_mode mode;
3058 enum machine_mode mode2;
3060 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3063 arg0 = CALL_EXPR_ARG (exp, 0);
3064 arg1 = CALL_EXPR_ARG (exp, 1);
3065 mode = TYPE_MODE (TREE_TYPE (exp));
3067 /* Handle constant power. */
3069 if (TREE_CODE (arg1) == INTEGER_CST
3070 && !TREE_OVERFLOW (arg1))
3072 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3074 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3075 Otherwise, check the number of multiplications required. */
3076 if ((TREE_INT_CST_HIGH (arg1) == 0
3077 || TREE_INT_CST_HIGH (arg1) == -1)
3078 && ((n >= -1 && n <= 2)
3079 || (optimize_insn_for_speed_p ()
3080 && powi_cost (n) <= POWI_MAX_MULTS)))
3082 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3083 op0 = force_reg (mode, op0);
3084 return expand_powi (op0, mode, n);
3088 /* Emit a libcall to libgcc. */
3090 /* Mode of the 2nd argument must match that of an int. */
3091 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3093 if (target == NULL_RTX)
3094 target = gen_reg_rtx (mode);
3096 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3097 if (GET_MODE (op0) != mode)
3098 op0 = convert_to_mode (mode, op0, 0);
3099 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3100 if (GET_MODE (op1) != mode2)
3101 op1 = convert_to_mode (mode2, op1, 0);
/* __powi* is a pure function, hence LCT_CONST.  */
3103 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3104 target, LCT_CONST, mode, 2,
3105 op0, mode, op1, mode2);
3110 /* Expand expression EXP which is a call to the strlen builtin. Return
3111 NULL_RTX if we failed, the caller should emit a normal call, otherwise
3112 try to get the result in TARGET, if convenient. */
3115 expand_builtin_strlen (tree exp, rtx target,
3116 enum machine_mode target_mode)
3118 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3124 tree src = CALL_EXPR_ARG (exp, 0);
3125 rtx result, src_reg, char_rtx, before_strlen;
3126 enum machine_mode insn_mode = target_mode, char_mode;
3127 enum insn_code icode = CODE_FOR_nothing;
3130 /* If the length can be computed at compile-time, return it. */
3131 len = c_strlen (src, 0);
3133 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3135 /* If the length can be computed at compile-time and is constant
3136 integer, but there are side-effects in src, evaluate
3137 src for side-effects, then return len.
3138 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3139 can be optimized into: i++; x = 3; */
3140 len = c_strlen (src, 1);
3141 if (len && TREE_CODE (len) == INTEGER_CST)
3143 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3144 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3147 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3149 /* If SRC is not a pointer type, don't do this operation inline. */
3153 /* Bail out if we can't compute strlen in the right mode.  Walk to
   ever wider modes looking for a target strlen pattern.  */
3154 while (insn_mode != VOIDmode)
3156 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3157 if (icode != CODE_FOR_nothing)
3160 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3162 if (insn_mode == VOIDmode)
3165 /* Make a place to write the result of the instruction. */
3169 && GET_MODE (result) == insn_mode
3170 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3171 result = gen_reg_rtx (insn_mode);
3173 /* Make a place to hold the source address. We will not expand
3174 the actual source until we are sure that the expansion will
3175 not fail -- there are trees that cannot be expanded twice. */
3176 src_reg = gen_reg_rtx (Pmode);
3178 /* Mark the beginning of the strlen sequence so we can emit the
3179 source operand later. */
3180 before_strlen = get_last_insn ();
3182 char_rtx = const0_rtx;
3183 char_mode = insn_data[(int) icode].operand[2].mode;
3184 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3186 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3188 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3189 char_rtx, GEN_INT (align));
3194 /* Now that we are assured of success, expand the source. */
3196 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3198 emit_move_insn (src_reg, pat);
/* Emit the address computation before the strlen insn recorded above.  */
3203 emit_insn_after (pat, before_strlen);
3205 emit_insn_before (pat, get_insns ());
3207 /* Return the value in the proper mode for this function. */
3208 if (GET_MODE (result) == target_mode)
3210 else if (target != 0)
3211 convert_move (target, result, 0);
3213 target = convert_to_mode (target_mode, result, 0);
3219 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3220 bytes from constant string DATA + OFFSET and return it as target
3224 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3225 enum machine_mode mode)
3227 const char *str = (const char *) data;
  /* Guarantee the read stays within the string, including its
     terminating NUL.  */
3229 gcc_assert (offset >= 0
3230 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3231 <= strlen (str) + 1));
3233 return c_readstr (str + offset, mode);
3236 /* Expand a call EXP to the memcpy builtin.
3237 Return NULL_RTX if we failed, the caller should emit a normal call,
3238 otherwise try to get the result in TARGET, if convenient (and in
3239 mode MODE if that's convenient). */
3242 expand_builtin_memcpy (tree exp, rtx target)
  /* Only handle memcpy(ptr, ptr, int); anything else falls back to a
     library call.  */
3244 if (!validate_arglist (exp,
3245 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3249 tree dest = CALL_EXPR_ARG (exp, 0);
3250 tree src = CALL_EXPR_ARG (exp, 1);
3251 tree len = CALL_EXPR_ARG (exp, 2);
3252 const char *src_str;
3253 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3254 unsigned int dest_align
3255 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3256 rtx dest_mem, src_mem, dest_addr, len_rtx;
3257 HOST_WIDE_INT expected_size = -1;
3258 unsigned int expected_align = 0;
3260 /* If DEST is not a pointer type, call the normal function. */
3261 if (dest_align == 0)
3264 /* If either SRC is not a pointer type, don't do this
3265 operation in-line. */
  /* Pull profile-feedback hints (expected block size/alignment) from
     the statement currently being expanded, if any.  */
3269 if (currently_expanding_gimple_stmt)
3270 stringop_block_profile (currently_expanding_gimple_stmt,
3271 &expected_align, &expected_size)
3273 if (expected_align < dest_align)
3274 expected_align = dest_align;
3275 dest_mem = get_memory_rtx (dest, len);
3276 set_mem_align (dest_mem, dest_align);
3277 len_rtx = expand_normal (len);
3278 src_str = c_getstr (src);
3280 /* If SRC is a string constant and block move would be done
3281 by pieces, we can avoid loading the string from memory
3282 and only stored the computed constants. */
3284 && CONST_INT_P (len_rtx)
3285 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3286 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3287 CONST_CAST (char *, src_str),
  /* Fast path: materialize the constant string directly into DEST a
     word at a time; the result is the DEST address in ptr_mode.  */
3290 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3291 builtin_memcpy_read_str,
3292 CONST_CAST (char *, src_str),
3293 dest_align, false, 0);
3294 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3295 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3299 src_mem = get_memory_rtx (src, len);
3300 set_mem_align (src_mem, src_align);
3302 /* Copy word part most expediently. */
3303 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3304 CALL_EXPR_TAILCALL (exp)
3305 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3306 expected_align, expected_size);
3310 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3311 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3317 /* Expand a call EXP to the mempcpy builtin.
3318 Return NULL_RTX if we failed; the caller should emit a normal call,
3319 otherwise try to get the result in TARGET, if convenient (and in
3320 mode MODE if that's convenient). If ENDP is 0 return the
3321 destination pointer, if ENDP is 1 return the end pointer ala
3322 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3326 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
  /* Validate mempcpy(ptr, ptr, int), then delegate to the args helper
     with ENDP == 1 (return the end pointer, as mempcpy does).  */
3328 if (!validate_arglist (exp,
3329 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3333 tree dest = CALL_EXPR_ARG (exp, 0);
3334 tree src = CALL_EXPR_ARG (exp, 1);
3335 tree len = CALL_EXPR_ARG (exp, 2);
3336 return expand_builtin_mempcpy_args (dest, src, len,
3337 target, mode, /*endp=*/ 1);
3341 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3342 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3343 so that this can also be called without constructing an actual CALL_EXPR.
3344 The other arguments and return value are the same as for
3345 expand_builtin_mempcpy. */
3348 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3349 rtx target, enum machine_mode mode, int endp)
3351 /* If return value is ignored, transform mempcpy into memcpy. */
3352 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3354 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3355 tree result = build_call_nofold (fn, 3, dest, src, len);
3356 return expand_expr (result, target, mode, EXPAND_NORMAL);
3360 const char *src_str;
3361 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3362 unsigned int dest_align
3363 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3364 rtx dest_mem, src_mem, len_rtx;
3366 /* If either SRC or DEST is not a pointer type, don't do this
3367 operation in-line. */
3368 if (dest_align == 0 || src_align == 0)
3371 /* If LEN is not constant, call the normal function. */
3372 if (! host_integerp (len, 1))
3375 len_rtx = expand_normal (len);
3376 src_str = c_getstr (src);
3378 /* If SRC is a string constant and block move would be done
3379 by pieces, we can avoid loading the string from memory
3380 and only stored the computed constants. */
3382 && CONST_INT_P (len_rtx)
3383 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3384 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3385 CONST_CAST (char *, src_str),
  /* Constant-string fast path: store the bytes directly.  ENDP is
     forwarded so store_by_pieces returns the desired pointer
     (start, end, or end-1).  */
3388 dest_mem = get_memory_rtx (dest, len);
3389 set_mem_align (dest_mem, dest_align);
3390 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3391 builtin_memcpy_read_str,
3392 CONST_CAST (char *, src_str),
3393 dest_align, false, endp);
3394 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3395 dest_mem = convert_memory_address (ptr_mode, dest_mem);
  /* Otherwise, if the constant length is small enough, move the block
     piecewise; move_by_pieces likewise honors ENDP for the return
     value.  */
3399 if (CONST_INT_P (len_rtx)
3400 && can_move_by_pieces (INTVAL (len_rtx),
3401 MIN (dest_align, src_align)))
3403 dest_mem = get_memory_rtx (dest, len);
3404 set_mem_align (dest_mem, dest_align);
3405 src_mem = get_memory_rtx (src, len);
3406 set_mem_align (src_mem, src_align);
3407 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3408 MIN (dest_align, src_align), endp);
3409 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3410 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3419 # define HAVE_movstr 0
3420 # define CODE_FOR_movstr CODE_FOR_nothing
3423 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3424 we failed, the caller should emit a normal call, otherwise try to
3425 get the result in TARGET, if convenient. If ENDP is 0 return the
3426 destination pointer, if ENDP is 1 return the end pointer ala
3427 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3431 expand_movstr (tree dest, tree src, rtx target, int endp)
3437 const struct insn_data * data;
3442 dest_mem = get_memory_rtx (dest, NULL);
3443 src_mem = get_memory_rtx (src, NULL);
3446 target = force_reg (Pmode, XEXP (dest_mem, 0));
3447 dest_mem = replace_equiv_address (dest_mem, target);
3448 end = gen_reg_rtx (Pmode);
  /* TARGET of 0 or const0_rtx means the caller ignores the value;
     still need a scratch register for the pattern's output.  */
3452 if (target == 0 || target == const0_rtx)
3454 end = gen_reg_rtx (Pmode);
  /* Adapt the output operand to the mode the movstr pattern wants.  */
3462 data = insn_data + CODE_FOR_movstr;
3464 if (data->operand[0].mode != VOIDmode)
3465 end = gen_lowpart (data->operand[0].mode, end);
3467 insn = data->genfun (end, dest_mem, src_mem);
3473 /* movstr is supposed to set end to the address of the NUL
3474 terminator. If the caller requested a mempcpy-like return value,
3476 if (endp == 1 && target != const0_rtx)
  /* ... adjust: mempcpy's result is one past the NUL, hence +1.  */
3478 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3479 emit_move_insn (target, force_operand (tem, NULL_RTX));
3485 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3486 NULL_RTX if we failed the caller should emit a normal call, otherwise
3487 try to get the result in TARGET, if convenient (and in mode MODE if that's
3491 expand_builtin_strcpy (tree exp, rtx target)
  /* Validate strcpy(ptr, ptr) and hand off to the args helper.  */
3493 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3495 tree dest = CALL_EXPR_ARG (exp, 0);
3496 tree src = CALL_EXPR_ARG (exp, 1);
3497 return expand_builtin_strcpy_args (dest, src, target);
3502 /* Helper function to do the actual work for expand_builtin_strcpy. The
3503 arguments to the builtin_strcpy call DEST and SRC are broken out
3504 so that this can also be called without constructing an actual CALL_EXPR.
3505 The other arguments and return value are the same as for
3506 expand_builtin_strcpy. */
3509 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
  /* strcpy returns DEST, so expand movstr with ENDP == 0.  */
3511 return expand_movstr (dest, src, target, /*endp=*/0);
3514 /* Expand a call EXP to the stpcpy builtin.
3515 Return NULL_RTX if we failed the caller should emit a normal call,
3516 otherwise try to get the result in TARGET, if convenient (and in
3517 mode MODE if that's convenient). */
3520 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3523 location_t loc = EXPR_LOCATION (exp);
3525 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3528 dst = CALL_EXPR_ARG (exp, 0);
3529 src = CALL_EXPR_ARG (exp, 1);
3531 /* If return value is ignored, transform stpcpy into strcpy. */
3532 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3534 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3535 tree result = build_call_nofold (fn, 2, dst, src);
3536 return expand_expr (result, target, mode, EXPAND_NORMAL);
3543 /* Ensure we get an actual string whose length can be evaluated at
3544 compile-time, not an expression containing a string. This is
3545 because the latter will potentially produce pessimized code
3546 when used to produce the return value. */
3547 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3548 return expand_movstr (dst, src, target, /*endp=*/2);
  /* Known-length source: copy strlen(src)+1 bytes via mempcpy with
     ENDP == 2, i.e. return the address of the copied NUL.  */
3550 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3551 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3552 target, mode, /*endp=*/2);
  /* If mempcpy expansion failed but LEN is a constant, fall back to a
     strcpy expansion and compute the result as DEST + LEN.  */
3557 if (TREE_CODE (len) == INTEGER_CST)
3559 rtx len_rtx = expand_normal (len);
3561 if (CONST_INT_P (len_rtx))
3563 ret = expand_builtin_strcpy_args (dst, src, target);
3569 if (mode != VOIDmode)
3570 target = gen_reg_rtx (mode);
3572 target = gen_reg_rtx (GET_MODE (ret));
3574 if (GET_MODE (target) != GET_MODE (ret))
3575 ret = gen_lowpart (GET_MODE (target), ret);
3577 ret = plus_constant (ret, INTVAL (len_rtx));
3578 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3586 return expand_movstr (dst, src, target, /*endp=*/2);
3590 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3591 bytes from constant string DATA + OFFSET and return it as target
3595 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3596 enum machine_mode mode)
3598 const char *str = (const char *) data;
  /* Offsets past the string's end supply strncpy's zero padding
     (the taken branch is elided here — presumably returns zeros;
     verify against the full source).  */
3600 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3603 return c_readstr (str + offset, mode);
3606 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3607 NULL_RTX if we failed the caller should emit a normal call. */
3610 expand_builtin_strncpy (tree exp, rtx target)
3612 location_t loc = EXPR_LOCATION (exp);
3614 if (validate_arglist (exp,
3615 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3617 tree dest = CALL_EXPR_ARG (exp, 0);
3618 tree src = CALL_EXPR_ARG (exp, 1);
3619 tree len = CALL_EXPR_ARG (exp, 2);
3620 tree slen = c_strlen (src, 1);
3622 /* We must be passed a constant len and src parameter. */
3623 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
  /* SLEN becomes strlen(src) + 1, the number of bytes actually
     coming from the source string.  */
3626 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3628 /* We're required to pad with trailing zeros if the requested
3629 len is greater than strlen(s2)+1. In that case try to
3630 use store_by_pieces, if it fails, punt. */
3631 if (tree_int_cst_lt (slen, len))
3633 unsigned int dest_align
3634 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3635 const char *p = c_getstr (src);
3638 if (!p || dest_align == 0 || !host_integerp (len, 1)
3639 || !can_store_by_pieces (tree_low_cst (len, 1),
3640 builtin_strncpy_read_str,
3641 CONST_CAST (char *, p),
  /* builtin_strncpy_read_str supplies the zero padding beyond the
     end of the source string.  */
3645 dest_mem = get_memory_rtx (dest, len);
3646 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3647 builtin_strncpy_read_str,
3648 CONST_CAST (char *, p), dest_align, false, 0);
3649 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3650 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3657 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3658 bytes from constant string DATA + OFFSET and return it as target
3662 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3663 enum machine_mode mode)
  /* DATA points at the single fill byte; build a MODE-sized word
     consisting of that byte repeated.  OFFSET is irrelevant since
     every position holds the same value.  */
3665 const char *c = (const char *) data;
3666 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3668 memset (p, *c, GET_MODE_SIZE (mode));
3670 return c_readstr (p, mode);
3673 /* Callback routine for store_by_pieces. Return the RTL of a register
3674 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3675 char value given in the RTL register data. For example, if mode is
3676 4 bytes wide, return the RTL for 0x01010101*data. */
3679 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3680 enum machine_mode mode)
3686 size = GET_MODE_SIZE (mode);
  /* Build the replication coefficient 0x0101...01 by reading a buffer
     of 1-bytes, then multiply the (runtime) byte value by it.  */
3690 p = XALLOCAVEC (char, size);
3691 memset (p, 1, size);
3692 coeff = c_readstr (p, mode);
3694 target = convert_to_mode (mode, (rtx) data, 1);
3695 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3696 return force_reg (mode, target);
3699 /* Expand expression EXP, which is a call to the memset builtin. Return
3700 NULL_RTX if we failed the caller should emit a normal call, otherwise
3701 try to get the result in TARGET, if convenient (and in mode MODE if that's
3705 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
  /* Validate memset(ptr, int, int) and delegate; EXP is forwarded so
     the helper can inspect tailcall flags and the callee.  */
3707 if (!validate_arglist (exp,
3708 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3712 tree dest = CALL_EXPR_ARG (exp, 0);
3713 tree val = CALL_EXPR_ARG (exp, 1);
3714 tree len = CALL_EXPR_ARG (exp, 2);
3715 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3719 /* Helper function to do the actual work for expand_builtin_memset. The
3720 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3721 so that this can also be called without constructing an actual CALL_EXPR.
3722 The other arguments and return value are the same as for
3723 expand_builtin_memset. */
3726 expand_builtin_memset_args (tree dest, tree val, tree len,
3727 rtx target, enum machine_mode mode, tree orig_exp)
3730 enum built_in_function fcode;
3732 unsigned int dest_align;
3733 rtx dest_mem, dest_addr, len_rtx;
3734 HOST_WIDE_INT expected_size = -1;
3735 unsigned int expected_align = 0;
3737 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3739 /* If DEST is not a pointer type, don't do this operation in-line. */
3740 if (dest_align == 0)
  /* Pick up profile-feedback hints about typical size/alignment.  */
3743 if (currently_expanding_gimple_stmt)
3744 stringop_block_profile (currently_expanding_gimple_stmt,
3745 &expected_align, &expected_size);
3747 if (expected_align < dest_align)
3748 expected_align = dest_align;
3750 /* If the LEN parameter is zero, return DEST. */
3751 if (integer_zerop (len))
3753 /* Evaluate and ignore VAL in case it has side-effects. */
3754 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3755 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3758 /* Stabilize the arguments in case we fail. */
3759 dest = builtin_save_expr (dest);
3760 val = builtin_save_expr (val);
3761 len = builtin_save_expr (len);
3763 len_rtx = expand_normal (len);
3764 dest_mem = get_memory_rtx (dest, len);
  /* Non-constant fill value: must replicate it at runtime.  */
3766 if (TREE_CODE (val) != INTEGER_CST)
3770 val_rtx = expand_normal (val);
3771 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3774 /* Assume that we can memset by pieces if we can store
3775 * the coefficients by pieces (in the required modes).
3776 * We can't pass builtin_memset_gen_str as that emits RTL. */
3778 if (host_integerp (len, 1)
3779 && can_store_by_pieces (tree_low_cst (len, 1),
3780 builtin_memset_read_str, &c, dest_align,
3783 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3785 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3786 builtin_memset_gen_str, val_rtx, dest_align,
3789 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3790 dest_align, expected_align,
3794 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3795 dest_mem = convert_memory_address (ptr_mode, dest_mem);
  /* Constant fill value: reduce it to a host char, punting if that
     is impossible.  */
3799 if (target_char_cast (val, &c))
3804 if (host_integerp (len, 1)
3805 && can_store_by_pieces (tree_low_cst (len, 1),
3806 builtin_memset_read_str, &c, dest_align,
3808 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3809 builtin_memset_read_str, &c, dest_align, true, 0);
3810 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3811 dest_align, expected_align,
3815 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3816 dest_mem = convert_memory_address (ptr_mode, dest_mem);
  /* VAL is zero here — clear the storage outright, passing along the
     tail-call flag and profile hints.  */
3820 set_mem_align (dest_mem, dest_align);
3821 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3822 CALL_EXPR_TAILCALL (orig_exp)
3823 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3824 expected_align, expected_size);
3828 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3829 dest_addr = convert_memory_address (ptr_mode, dest_addr);
  /* All inline strategies failed: rebuild and emit the original call
     (memset or bzero, matching the caller's builtin).  */
3835 fndecl = get_callee_fndecl (orig_exp);
3836 fcode = DECL_FUNCTION_CODE (fndecl);
3837 if (fcode == BUILT_IN_MEMSET)
3838 fn = build_call_nofold (fndecl, 3, dest, val, len);
3839 else if (fcode == BUILT_IN_BZERO)
3840 fn = build_call_nofold (fndecl, 2, dest, len);
3843 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3844 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3845 return expand_call (fn, target, target == const0_rtx);
3848 /* Expand expression EXP, which is a call to the bzero builtin. Return
3849 NULL_RTX if we failed the caller should emit a normal call. */
3852 expand_builtin_bzero (tree exp)
3855 location_t loc = EXPR_LOCATION (exp);
3857 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3860 dest = CALL_EXPR_ARG (exp, 0);
3861 size = CALL_EXPR_ARG (exp, 1);
3863 /* New argument list transforming bzero(ptr x, int y) to
3864 memset(ptr x, int 0, size_t y). This is done this way
3865 so that if it isn't expanded inline, we fallback to
3866 calling bzero instead of memset. */
  /* const0_rtx as TARGET marks the result as unused (bzero is void).  */
3868 return expand_builtin_memset_args (dest, integer_zero_node,
3869 fold_convert_loc (loc, sizetype, size),
3870 const0_rtx, VOIDmode, exp);
3873 /* Expand expression EXP, which is a call to the memcmp built-in function.
3874 Return NULL_RTX if we failed and the
3875 caller should emit a normal call, otherwise try to get the result in
3876 TARGET, if convenient (and in mode MODE, if that's convenient). */
3879 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3880 ATTRIBUTE_UNUSED enum machine_mode mode)
3882 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3884 if (!validate_arglist (exp,
3885 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
  /* Everything below requires a target block-compare pattern.  */
3888 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3890 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3893 tree arg1 = CALL_EXPR_ARG (exp, 0);
3894 tree arg2 = CALL_EXPR_ARG (exp, 1);
3895 tree len = CALL_EXPR_ARG (exp, 2);
3898 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3900 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3901 enum machine_mode insn_mode;
  /* Prefer cmpmemsi; fall back to cmpstrnsi when only that exists.  */
3903 #ifdef HAVE_cmpmemsi
3905 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3908 #ifdef HAVE_cmpstrnsi
3910 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3915 /* If we don't have POINTER_TYPE, call the function. */
3916 if (arg1_align == 0 || arg2_align == 0)
3919 /* Make a place to write the result of the instruction. */
3922 && REG_P (result) && GET_MODE (result) == insn_mode
3923 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3924 result = gen_reg_rtx (insn_mode);
3926 arg1_rtx = get_memory_rtx (arg1, len);
3927 arg2_rtx = get_memory_rtx (arg2, len);
3928 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3930 /* Set MEM_SIZE as appropriate. */
3931 if (CONST_INT_P (arg3_rtx))
3933 set_mem_size (arg1_rtx, arg3_rtx);
3934 set_mem_size (arg2_rtx, arg3_rtx);
3937 #ifdef HAVE_cmpmemsi
3939 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3940 GEN_INT (MIN (arg1_align, arg2_align)));
3943 #ifdef HAVE_cmpstrnsi
3945 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3946 GEN_INT (MIN (arg1_align, arg2_align)));
  /* No usable pattern insn: fall back to calling the memcmp libfunc
     directly (LCT_PURE — no side effects beyond the return value).  */
3954 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3955 TYPE_MODE (integer_type_node), 3,
3956 XEXP (arg1_rtx, 0), Pmode,
3957 XEXP (arg2_rtx, 0), Pmode,
3958 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3959 TYPE_UNSIGNED (sizetype)),
3960 TYPE_MODE (sizetype));
3962 /* Return the value in the proper mode for this function. */
3963 mode = TYPE_MODE (TREE_TYPE (exp));
3964 if (GET_MODE (result) == mode)
3966 else if (target != 0)
3968 convert_move (target, result, 0);
3972 return convert_to_mode (mode, result, 0);
3979 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3980 if we failed the caller should emit a normal call, otherwise try to get
3981 the result in TARGET, if convenient. */
3984 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3986 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3989 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  /* Only worthwhile if the target actually implements one of the
     string-compare patterns.  */
3990 if (cmpstr_optab[SImode] != CODE_FOR_nothing
3991 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
3993 rtx arg1_rtx, arg2_rtx;
3994 rtx result, insn = NULL_RTX;
3996 tree arg1 = CALL_EXPR_ARG (exp, 0);
3997 tree arg2 = CALL_EXPR_ARG (exp, 1);
4000 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4002 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4004 /* If we don't have POINTER_TYPE, call the function. */
4005 if (arg1_align == 0 || arg2_align == 0)
4008 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4009 arg1 = builtin_save_expr (arg1);
4010 arg2 = builtin_save_expr (arg2);
4012 arg1_rtx = get_memory_rtx (arg1, NULL);
4013 arg2_rtx = get_memory_rtx (arg2, NULL);
4015 #ifdef HAVE_cmpstrsi
4016 /* Try to call cmpstrsi. */
4019 enum machine_mode insn_mode
4020 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4022 /* Make a place to write the result of the instruction. */
4025 && REG_P (result) && GET_MODE (result) == insn_mode
4026 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4027 result = gen_reg_rtx (insn_mode);
4029 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4030 GEN_INT (MIN (arg1_align, arg2_align)));
4033 #ifdef HAVE_cmpstrnsi
4034 /* Try to determine at least one length and call cmpstrnsi. */
4035 if (!insn && HAVE_cmpstrnsi)
4040 enum machine_mode insn_mode
4041 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
  /* c_strlen with the "allow side effects" flag; +1 converts a
     string length into a compare bound covering the NUL.  */
4042 tree len1 = c_strlen (arg1, 1);
4043 tree len2 = c_strlen (arg2, 1);
4046 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4048 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4050 /* If we don't have a constant length for the first, use the length
4051 of the second, if we know it. We don't require a constant for
4052 this case; some cost analysis could be done if both are available
4053 but neither is constant. For now, assume they're equally cheap,
4054 unless one has side effects. If both strings have constant lengths,
4061 else if (TREE_SIDE_EFFECTS (len1))
4063 else if (TREE_SIDE_EFFECTS (len2))
4065 else if (TREE_CODE (len1) != INTEGER_CST)
4067 else if (TREE_CODE (len2) != INTEGER_CST)
4069 else if (tree_int_cst_lt (len1, len2))
4074 /* If both arguments have side effects, we cannot optimize. */
4075 if (!len || TREE_SIDE_EFFECTS (len))
4078 arg3_rtx = expand_normal (len);
4080 /* Make a place to write the result of the instruction. */
4083 && REG_P (result) && GET_MODE (result) == insn_mode
4084 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4085 result = gen_reg_rtx (insn_mode);
4087 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4088 GEN_INT (MIN (arg1_align, arg2_align)));
4094 enum machine_mode mode;
4097 /* Return the value in the proper mode for this function. */
4098 mode = TYPE_MODE (TREE_TYPE (exp));
4099 if (GET_MODE (result) == mode)
4102 return convert_to_mode (mode, result, 0);
4103 convert_move (target, result, 0);
4107 /* Expand the library call ourselves using a stabilized argument
4108 list to avoid re-evaluating the function's arguments twice. */
4109 #ifdef HAVE_cmpstrnsi
4112 fndecl = get_callee_fndecl (exp);
4113 fn = build_call_nofold (fndecl, 2, arg1, arg2);
4114 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4115 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4116 return expand_call (fn, target, target == const0_rtx);
4122 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4123 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4124 the result in TARGET, if convenient. */
4127 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4128 ATTRIBUTE_UNUSED enum machine_mode mode)
4130 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4132 if (!validate_arglist (exp,
4133 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4136 /* If c_strlen can determine an expression for one of the string
4137 lengths, and it doesn't have side effects, then emit cmpstrnsi
4138 using length MIN(strlen(string)+1, arg3). */
4139 #ifdef HAVE_cmpstrnsi
4142 tree len, len1, len2;
4143 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4146 tree arg1 = CALL_EXPR_ARG (exp, 0);
4147 tree arg2 = CALL_EXPR_ARG (exp, 1);
4148 tree arg3 = CALL_EXPR_ARG (exp, 2);
4151 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4153 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4154 enum machine_mode insn_mode
4155 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
  /* Compute strlen(arg)+1 for whichever arguments have a computable
     length, so the NUL terminator is included in the compare.  */
4157 len1 = c_strlen (arg1, 1);
4158 len2 = c_strlen (arg2, 1);
4161 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4163 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4165 /* If we don't have a constant length for the first, use the length
4166 of the second, if we know it. We don't require a constant for
4167 this case; some cost analysis could be done if both are available
4168 but neither is constant. For now, assume they're equally cheap,
4169 unless one has side effects. If both strings have constant lengths,
4176 else if (TREE_SIDE_EFFECTS (len1))
4178 else if (TREE_SIDE_EFFECTS (len2))
4180 else if (TREE_CODE (len1) != INTEGER_CST)
4182 else if (TREE_CODE (len2) != INTEGER_CST)
4184 else if (tree_int_cst_lt (len1, len2))
4189 /* If both arguments have side effects, we cannot optimize. */
4190 if (!len || TREE_SIDE_EFFECTS (len))
4193 /* The actual new length parameter is MIN(len,arg3). */
4194 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4195 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4197 /* If we don't have POINTER_TYPE, call the function. */
4198 if (arg1_align == 0 || arg2_align == 0)
4201 /* Make a place to write the result of the instruction. */
4204 && REG_P (result) && GET_MODE (result) == insn_mode
4205 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4206 result = gen_reg_rtx (insn_mode);
4208 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4209 arg1 = builtin_save_expr (arg1);
4210 arg2 = builtin_save_expr (arg2);
4211 len = builtin_save_expr (len);
4213 arg1_rtx = get_memory_rtx (arg1, len);
4214 arg2_rtx = get_memory_rtx (arg2, len);
4215 arg3_rtx = expand_normal (len);
4216 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4217 GEN_INT (MIN (arg1_align, arg2_align)));
4222 /* Return the value in the proper mode for this function. */
4223 mode = TYPE_MODE (TREE_TYPE (exp));
4224 if (GET_MODE (result) == mode)
4227 return convert_to_mode (mode, result, 0);
4228 convert_move (target, result, 0);
4232 /* Expand the library call ourselves using a stabilized argument
4233 list to avoid re-evaluating the function's arguments twice. */
4234 fndecl = get_callee_fndecl (exp);
4235 fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
4236 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4237 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4238 return expand_call (fn, target, target == const0_rtx);
4244 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4245 if that's convenient. */
4248 expand_builtin_saveregs (void)
4252 /* Don't do __builtin_saveregs more than once in a function.
4253 Save the result of the first call and reuse it. */
4254 if (saveregs_value != 0)
4255 return saveregs_value;
4257 /* When this function is called, it means that registers must be
4258 saved on entry to this function. So we migrate the call to the
4259 first insn of this function. */
4263 /* Do whatever the machine needs done in this case. */
4264 val = targetm.calls.expand_builtin_saveregs ();
  /* Cache the result for subsequent calls in this function.  */
4269 saveregs_value = val;
4271 /* Put the insns after the NOTE that starts the function. If this
4272 is inside a start_sequence, make the outer-level insn chain current, so
4273 the code is placed at the start of the function. */
4274 push_topmost_sequence ();
4275 emit_insn_after (seq, entry_of_function ());
4276 pop_topmost_sequence ();
4281 /* __builtin_args_info (N) returns word N of the arg space info
4282 for the current function. The number and meanings of words
4283 is controlled by the definition of CUMULATIVE_ARGS. */
4286 expand_builtin_args_info (tree exp)
  /* Reinterpret the CUMULATIVE_ARGS record as an array of ints and
     index into it; the assert below guarantees the sizes divide.  */
4288 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4289 int *word_ptr = (int *) &crtl->args.info;
4291 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4293 if (call_expr_nargs (exp) != 0)
  /* The word index must be a compile-time constant in range.  */
4295 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4296 error ("argument of %<__builtin_args_info%> must be constant");
4299 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4301 if (wordnum < 0 || wordnum >= nwords)
4302 error ("argument of %<__builtin_args_info%> out of range");
4304 return GEN_INT (word_ptr[wordnum]);
4308 error ("missing argument in %<__builtin_args_info%>");
4313 /* Expand a call to __builtin_next_arg. */
4316 expand_builtin_next_arg (void)
4318 /* Checking arguments is already done in fold_builtin_next_arg
4319 that must be called before this function. */
  /* Address of the first anonymous argument: the internal arg pointer
     plus the precomputed offset of the named-args area.  */
4320 return expand_binop (ptr_mode, add_optab,
4321 crtl->args.internal_arg_pointer,
4322 crtl->args.arg_offset_rtx,
4323 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4326 /* Make it easier for the backends by protecting the valist argument
4327 from multiple evaluations. */
4330 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4332 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4334 gcc_assert (vatype != NULL_TREE);
4336 if (TREE_CODE (vatype) == ARRAY_TYPE)
4338 if (TREE_SIDE_EFFECTS (valist))
4339 valist = save_expr (valist);
4341 /* For this case, the backends will be expecting a pointer to
4342 vatype, but it's possible we've actually been given an array
4343 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4345 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4347 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4348 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
  /* Non-array va_list: when an lvalue is required, take the address
     and re-dereference so the value is evaluated exactly once.  */
4357 if (! TREE_SIDE_EFFECTS (valist))
4360 pt = build_pointer_type (vatype);
4361 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4362 TREE_SIDE_EFFECTS (valist) = 1;
4365 if (TREE_SIDE_EFFECTS (valist))
4366 valist = save_expr (valist);
4367 valist = build_fold_indirect_ref_loc (loc, valist);
4373 /* The "standard" definition of va_list is void*. */
/* Default TARGET_BUILD_BUILTIN_VA_LIST hook.  */
4376 std_build_builtin_va_list (void)
4378 return ptr_type_node;
4381 /* The "standard" abi va_list is va_list_type_node. */
/* Default TARGET_FN_ABI_VA_LIST hook; FNDECL is unused here.  */
4384 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4386 return va_list_type_node;
4389 /* The "standard" type of va_list is va_list_type_node. */
/* Default TARGET_CANONICAL_VA_LIST_TYPE hook: returns
   va_list_type_node when TYPE is (a possibly decayed form of) the
   va_list type, else presumably NULL_TREE on a fall-through path
   elided from this extract — confirm.  */
4392 std_canonical_va_list_type (tree type)
/* Strip one level of indirection so TYPE names the pointed-to type.  */
4396 if (INDIRECT_REF_P (type))
4397 type = TREE_TYPE (type);
4398 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4399 type = TREE_TYPE (type);
4400 wtype = va_list_type_node;
4402 /* Treat structure va_list types. */
4403 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4404 htype = TREE_TYPE (htype);
4405 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4407 /* If va_list is an array type, the argument may have decayed
4408 to a pointer type, e.g. by being passed to another function.
4409 In that case, unwrap both types so that we can compare the
4410 underlying records. */
4411 if (TREE_CODE (htype) == ARRAY_TYPE
4412 || POINTER_TYPE_P (htype))
4414 wtype = TREE_TYPE (wtype);
4415 htype = TREE_TYPE (htype);
4418 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4419 return va_list_type_node;
4424 /* The "standard" implementation of va_start: just assign `nextarg' to
/* Stores NEXTARG into the va_list object VALIST via a plain move.  */
4428 std_expand_builtin_va_start (tree valist, rtx nextarg)
4430 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4431 convert_move (va_r, nextarg, 0);
4434 /* Expand EXP, a call to __builtin_va_start. */
/* Validates the argument count, stabilizes the va_list operand, and
   dispatches to the target hook when provided, otherwise to the
   standard expander.  */
4437 expand_builtin_va_start (tree exp)
4441 location_t loc = EXPR_LOCATION (exp);
4443 if (call_expr_nargs (exp) < 2)
4445 error_at (loc, "too few arguments to function %<va_start%>");
/* fold_builtin_next_arg diagnoses a bad second argument; on error we
   presumably bail out (the early-return line is elided).  */
4449 if (fold_builtin_next_arg (exp, true))
4452 nextarg = expand_builtin_next_arg ();
4453 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4455 if (targetm.expand_builtin_va_start)
4456 targetm.expand_builtin_va_start (valist, nextarg);
4458 std_expand_builtin_va_start (valist, nextarg);
4463 /* The "standard" implementation of va_arg: read the value from the
4464 current (padded) address and increment by the (padded) size. */
/* Gimplifies a VA_ARG_EXPR for args-grow-up targets: aligns the
   va_list pointer if TYPE needs more than PARM_BOUNDARY alignment,
   adjusts for downward padding of small args, advances the pointer by
   the rounded size, and dereferences the saved address.  Emits setup
   statements into PRE_P.  */
4467 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4470 tree addr, t, type_size, rounded_size, valist_tmp;
4471 unsigned HOST_WIDE_INT align, boundary;
4474 #ifdef ARGS_GROW_DOWNWARD
4475 /* All of the alignment and movement below is for args-grow-up machines.
4476 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4477 implement their own specialized gimplify_va_arg_expr routines. */
/* Large/non-trivially-copyable types may be passed by reference; in
   that case we fetch a pointer and dereference it at the end.  */
4481 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4483 type = build_pointer_type (type);
4485 align = PARM_BOUNDARY / BITS_PER_UNIT;
4486 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4488 /* When we align parameter on stack for caller, if the parameter
4489 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4490 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4491 here with caller. */
4492 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4493 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4495 boundary /= BITS_PER_UNIT;
4497 /* Hoist the valist value into a temporary for the moment. */
4498 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4500 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4501 requires greater alignment, we must perform dynamic alignment. */
4502 if (boundary > align
4503 && !integer_zerop (TYPE_SIZE (type)))
/* valist_tmp = (valist_tmp + boundary - 1) & -boundary, done in two
   gimplified assignments.  */
4505 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4506 fold_build2 (POINTER_PLUS_EXPR,
4508 valist_tmp, size_int (boundary - 1)));
4509 gimplify_and_add (t, pre_p);
4511 t = fold_convert (sizetype, valist_tmp);
4512 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4513 fold_convert (TREE_TYPE (valist),
4514 fold_build2 (BIT_AND_EXPR, sizetype, t,
4515 size_int (-boundary))));
4516 gimplify_and_add (t, pre_p);
4521 /* If the actual alignment is less than the alignment of the type,
4522 adjust the type accordingly so that we don't assume strict alignment
4523 when dereferencing the pointer. */
4524 boundary *= BITS_PER_UNIT;
4525 if (boundary < TYPE_ALIGN (type))
4527 type = build_variant_type_copy (type);
4528 TYPE_ALIGN (type) = boundary;
4531 /* Compute the rounded size of the type. */
4532 type_size = size_in_bytes (type);
4533 rounded_size = round_up (type_size, align);
4535 /* Reduce rounded_size so it's sharable with the postqueue. */
4536 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4540 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4542 /* Small args are padded downward. */
/* addr += rounded_size > align ? 0 : rounded_size - type_size.  */
4543 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4544 rounded_size, size_int (align));
4545 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4546 size_binop (MINUS_EXPR, rounded_size, type_size));
4547 addr = fold_build2 (POINTER_PLUS_EXPR,
4548 TREE_TYPE (addr), addr, t);
4551 /* Compute new value for AP. */
4552 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4553 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4554 gimplify_and_add (t, pre_p);
4556 addr = fold_convert (build_pointer_type (type), addr);
/* Extra dereference for pass-by-reference arguments (guard elided).  */
4559 addr = build_va_arg_indirect_ref (addr);
4561 return build_va_arg_indirect_ref (addr);
4564 /* Build an indirect-ref expression over the given TREE, which represents a
4565 piece of a va_arg() expansion. */
/* Wraps ADDR in a folded INDIRECT_REF; when mudflap is active the
   resulting reference is excluded from instrumentation (the marking
   statement is elided in this extract).  */
4567 build_va_arg_indirect_ref (tree addr)
4569 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4571 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4577 /* Return a dummy expression of type TYPE in order to keep going after an
/* Builds *(TYPE *)0 — a placeholder of the right type/mode used for
   error recovery; it is never meant to be executed.  */
4581 dummy_object (tree type)
4583 tree t = build_int_cst (build_pointer_type (type), 0);
4584 return build1 (INDIRECT_REF, type, t);
4587 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4588 builtin function, but a very special sort of operator. */
/* Validates the va_list operand's type, diagnoses va_arg on a type
   that undergoes default argument promotion (replacing the expression
   with a trap plus a dummy result), stabilizes the operand, and
   dispatches to the target's gimplify_va_arg_expr hook.  Returns a
   gimplify_status (success/error codes on elided lines).  */
4590 enum gimplify_status
4591 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4593 tree promoted_type, have_va_type;
4594 tree valist = TREE_OPERAND (*expr_p, 0);
4595 tree type = TREE_TYPE (*expr_p);
4597 location_t loc = EXPR_LOCATION (*expr_p);
4599 /* Verify that valist is of the proper type. */
4600 have_va_type = TREE_TYPE (valist);
4601 if (have_va_type == error_mark_node)
4603 have_va_type = targetm.canonical_va_list_type (have_va_type);
4605 if (have_va_type == NULL_TREE)
4607 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4611 /* Generate a diagnostic for requesting data of a type that cannot
4612 be passed through `...' due to type promotion at the call site. */
4613 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
/* One-shot help text across all diagnostics in this run.  */
4616 static bool gave_help;
4619 /* Unfortunately, this is merely undefined, rather than a constraint
4620 violation, so we cannot make this an error. If this call is never
4621 executed, the program is still strictly conforming. */
4622 warned = warning_at (loc, 0,
4623 "%qT is promoted to %qT when passed through %<...%>",
4624 type, promoted_type);
4625 if (!gave_help && warned)
4628 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4629 promoted_type, type);
4632 /* We can, however, treat "undefined" any way we please.
4633 Call abort to encourage the user to fix the program. */
4635 inform (loc, "if this code is reached, the program will abort");
4636 /* Before the abort, allow the evaluation of the va_list
4637 expression to exit or longjmp. */
4638 gimplify_and_add (valist, pre_p);
4639 t = build_call_expr_loc (loc,
4640 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4641 gimplify_and_add (t, pre_p);
4643 /* This is dead code, but go ahead and finish so that the
4644 mode of the result comes out right. */
4645 *expr_p = dummy_object (type);
4650 /* Make it easier for the backends by protecting the valist argument
4651 from multiple evaluations. */
4652 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4654 /* For this case, the backends will be expecting a pointer to
4655 TREE_TYPE (abi), but it's possible we've
4656 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4658 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4660 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4661 valist = fold_convert_loc (loc, p1,
4662 build_fold_addr_expr_loc (loc, valist));
/* Array va_list: gimplify to an rvalue pointer ...  */
4665 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
/* ... otherwise keep an lvalue so the hook can update it in place.  */
4668 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4670 if (!targetm.gimplify_va_arg_expr)
4671 /* FIXME: Once most targets are converted we should merely
4672 assert this is non-null. */
4675 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4680 /* Expand EXP, a call to __builtin_va_end. */
/* va_end is a no-op for the standard va_list; only the operand's side
   effects need to be evaluated.  */
4683 expand_builtin_va_end (tree exp)
4685 tree valist = CALL_EXPR_ARG (exp, 0);
4687 /* Evaluate for side effects, if needed. I hate macros that don't
4689 if (TREE_SIDE_EFFECTS (valist))
4690 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4695 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4696 builtin rather than just as an assignment in stdarg.h because of the
4697 nastiness of array-type va_list types. */
/* Scalar (non-array) va_lists are copied with a simple assignment;
   array-type va_lists require a block move of the whole object.  */
4700 expand_builtin_va_copy (tree exp)
4703 location_t loc = EXPR_LOCATION (exp);
4705 dst = CALL_EXPR_ARG (exp, 0);
4706 src = CALL_EXPR_ARG (exp, 1);
/* Destination needs an lvalue (1); source does not (0).  */
4708 dst = stabilize_va_list_loc (loc, dst, 1);
4709 src = stabilize_va_list_loc (loc, src, 0);
4711 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4713 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4715 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4716 TREE_SIDE_EFFECTS (t) = 1;
4717 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4721 rtx dstb, srcb, size;
4723 /* Evaluate to pointers. */
4724 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4725 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4726 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4727 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4729 dstb = convert_memory_address (Pmode, dstb);
4730 srcb = convert_memory_address (Pmode, srcb);
4732 /* "Dereference" to BLKmode memories. */
4733 dstb = gen_rtx_MEM (BLKmode, dstb);
4734 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4735 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4736 srcb = gen_rtx_MEM (BLKmode, srcb);
4737 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4738 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
/* Copy the entire ABI va_list object.  */
4741 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4747 /* Expand a call to one of the builtin functions __builtin_frame_address or
4748 __builtin_return_address. */
/* Validates the count argument, delegates the frame walk to
   expand_builtin_return_addr, and warns when the target cannot access
   the requested frame (tem comes back null/invalid — guard elided).
   Return values on the error paths are on elided lines.  */
4751 expand_builtin_frame_address (tree fndecl, tree exp)
4753 /* The argument must be a nonnegative integer constant.
4754 It counts the number of frames to scan up the stack.
4755 The value is the return address saved in that frame. */
4756 if (call_expr_nargs (exp) == 0)
4757 /* Warning about missing arg was already issued. */
4759 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4761 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4762 error ("invalid argument to %<__builtin_frame_address%>");
4764 error ("invalid argument to %<__builtin_return_address%>");
4770 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4771 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4773 /* Some ports cannot access arbitrary stack frames. */
4776 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4777 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4779 warning (0, "unsupported argument to %<__builtin_return_address%>");
4783 /* For __builtin_frame_address, return what we've got. */
4784 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
/* Non-constant return addresses are copied into a fresh Pmode reg
   (condition partially elided).  */
4788 && ! CONSTANT_P (tem))
4789 tem = copy_to_mode_reg (Pmode, tem);
4794 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4795 we failed and the caller should emit a normal call, otherwise try to get
4796 the result in TARGET, if convenient. */
4799 expand_builtin_alloca (tree exp, rtx target)
4804 /* Emit normal call if marked not-inlineable. */
4805 if (CALL_CANNOT_INLINE_P (exp))
4808 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4811 /* Compute the argument. */
4812 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4814 /* Allocate the desired space. */
/* BITS_PER_UNIT requests only byte alignment; the allocator itself
   enforces the target's stack alignment.  */
4815 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4816 result = convert_memory_address (ptr_mode, result);
4821 /* Expand a call to a bswap builtin with argument ARG0. MODE
4822 is the mode to expand with. */
/* Expands __builtin_bswap* through bswap_optab; returns NULL_RTX on
   argument-validation failure (return lines elided).  */
4825 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4827 enum machine_mode mode;
4831 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4834 arg = CALL_EXPR_ARG (exp, 0);
4835 mode = TYPE_MODE (TREE_TYPE (arg));
4836 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4838 target = expand_unop (mode, bswap_optab, op0, target, 1);
4840 gcc_assert (target);
4842 return convert_to_mode (mode, target, 0);
4845 /* Expand a call to a unary builtin in EXP.
4846 Return NULL_RTX if a normal call should be emitted rather than expanding the
4847 function in-line. If convenient, the result should be placed in TARGET.
4848 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* Generic driver for single-operand integer builtins (ffs, clz, ctz,
   popcount, parity, ...) driven by OP_OPTAB.  */
4851 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4852 rtx subtarget, optab op_optab)
4856 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4859 /* Compute the argument. */
4860 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4861 VOIDmode, EXPAND_NORMAL);
4862 /* Compute op, into TARGET if possible.
4863 Set TARGET to wherever the result comes back. */
4864 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4865 op_optab, op0, target, 1);
4866 gcc_assert (target);
4868 return convert_to_mode (target_mode, target, 0);
4871 /* Expand a call to __builtin_expect. We just return our argument
4872 as the builtin_expect semantic should've been already executed by
4873 tree branch prediction pass. */
4876 expand_builtin_expect (tree exp, rtx target)
4880 if (call_expr_nargs (exp) < 2)
4882 arg = CALL_EXPR_ARG (exp, 0);
/* Only the first argument matters at expand time; the hint was
   consumed by earlier passes.  */
4884 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4885 /* When guessing was done, the hints should be already stripped away. */
4886 gcc_assert (!flag_guess_branch_prob
4887 || optimize == 0 || errorcount || sorrycount);
/* Emit a trap: the target's trap insn when available, otherwise a
   noreturn call to abort (the HAVE_trap conditional is elided in this
   extract).  */
4892 expand_builtin_trap (void)
4896 emit_insn (gen_trap ());
4899 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4903 /* Expand a call to __builtin_unreachable. We do nothing except emit
4904 a barrier saying that control flow will not pass here.
4906 It is the responsibility of the program being compiled to ensure
4907 that control flow does never reach __builtin_unreachable. */
4909 expand_builtin_unreachable (void)
4914 /* Expand EXP, a call to fabs, fabsf or fabsl.
4915 Return NULL_RTX if a normal call should be emitted rather than expanding
4916 the function inline. If convenient, the result should be placed
4917 in TARGET. SUBTARGET may be used as the target for computing
4921 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4923 enum machine_mode mode;
4927 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4930 arg = CALL_EXPR_ARG (exp, 0);
/* Save the arg back into the CALL_EXPR so a later fallback call does
   not re-evaluate it.  */
4931 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4932 mode = TYPE_MODE (TREE_TYPE (arg));
4933 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4934 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4937 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4938 Return NULL is a normal call should be emitted rather than expanding the
4939 function inline. If convenient, the result should be placed in TARGET.
4940 SUBTARGET may be used as the target for computing the operand. */
4943 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4948 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4951 arg = CALL_EXPR_ARG (exp, 0)
4952 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4954 arg = CALL_EXPR_ARG (exp, 1);
4955 op1 = expand_normal (arg);
4957 return expand_copysign (op0, op1, target);
4960 /* Create a new constant string literal and return a char* pointer to it.
4961 The STRING_CST value is the LEN characters at STR. */
/* LEN must include the terminating NUL if the caller wants a proper C
   string — the function copies exactly LEN bytes.  */
4963 build_string_literal (int len, const char *str)
4965 tree t, elem, index, type;
4967 t = build_string (len, str);
/* Element type is `const char' (readonly variant of char).  */
4968 elem = build_type_variant (char_type_node, 1, 0);
4969 index = build_index_type (size_int (len - 1));
4970 type = build_array_type (elem, index);
4971 TREE_TYPE (t) = type;
4972 TREE_CONSTANT (t) = 1;
4973 TREE_READONLY (t) = 1;
4974 TREE_STATIC (t) = 1;
/* Return &str[0] as a `const char *'.  */
4976 type = build_pointer_type (elem);
4977 t = build1 (ADDR_EXPR, type,
4978 build4 (ARRAY_REF, elem,
4979 t, integer_zero_node, NULL_TREE, NULL_TREE));
4983 /* Expand a call to either the entry or exit function profiler. */
/* Calls __cyg_profile-style libfuncs with (this_fn, call_site);
   EXITP selects the exit variant.  */
4986 expand_builtin_profile_func (bool exitp)
4988 rtx this_rtx, which;
4990 this_rtx = DECL_RTL (current_function_decl);
4991 gcc_assert (MEM_P (this_rtx));
/* Strip the MEM wrapper to get the function's address.  */
4992 this_rtx = XEXP (this_rtx, 0);
4995 which = profile_function_exit_libfunc;
4997 which = profile_function_entry_libfunc;
4999 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5000 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5007 /* Expand a call to __builtin___clear_cache. */
/* Three-way configuration: no clear_cache insn but a CLEAR_INSN_CACHE
   macro -> fall back to the library call; neither -> no-op; insn
   available -> expand it inline.  Must never emit a libgcc call from
   the insn path (recursion hazard, see below).  */
5010 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5012 #ifndef HAVE_clear_cache
5013 #ifdef CLEAR_INSN_CACHE
5014 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5015 does something. Just do the default expansion to a call to
5019 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5020 does nothing. There is no need to call it. Do nothing. */
5022 #endif /* CLEAR_INSN_CACHE */
5024 /* We have a "clear_cache" insn, and it will handle everything. */
5026 rtx begin_rtx, end_rtx;
5027 enum insn_code icode;
5029 /* We must not expand to a library call. If we did, any
5030 fallback library function in libgcc that might contain a call to
5031 __builtin___clear_cache() would recurse infinitely. */
5032 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5034 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5038 if (HAVE_clear_cache)
5040 icode = CODE_FOR_clear_cache;
5042 begin = CALL_EXPR_ARG (exp, 0);
5043 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5044 begin_rtx = convert_memory_address (Pmode, begin_rtx);
/* Force operands into forms the insn's predicates accept.  */
5045 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5046 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5048 end = CALL_EXPR_ARG (exp, 1);
5049 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5050 end_rtx = convert_memory_address (Pmode, end_rtx);
5051 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5052 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5054 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5057 #endif /* HAVE_clear_cache */
5060 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
/* Rounds TRAMP up to TRAMPOLINE_ALIGNMENT at runtime:
   tramp = (tramp + align - 1) & -align.  */
5063 round_trampoline_addr (rtx tramp)
5065 rtx temp, addend, mask;
5067 /* If we don't need too much alignment, we'll have been guaranteed
5068 proper alignment by get_trampoline_type. */
5069 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5072 /* Round address up to desired boundary. */
5073 temp = gen_reg_rtx (Pmode);
5074 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5075 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5077 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5078 temp, 0, OPTAB_LIB_WIDEN);
5079 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5080 temp, 0, OPTAB_LIB_WIDEN);
/* Expand __builtin_init_trampoline (tramp, func, chain): build a MEM
   for the trampoline storage, align it, and hand off to the target's
   trampoline_init hook.  Returns NULL_RTX on bad arglist (return
   lines elided).  */
5086 expand_builtin_init_trampoline (tree exp)
5088 tree t_tramp, t_func, t_chain;
5089 rtx m_tramp, r_tramp, r_chain, tmp;
5091 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5092 POINTER_TYPE, VOID_TYPE))
5095 t_tramp = CALL_EXPR_ARG (exp, 0);
5096 t_func = CALL_EXPR_ARG (exp, 1);
5097 t_chain = CALL_EXPR_ARG (exp, 2);
5099 r_tramp = expand_normal (t_tramp);
5100 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5101 MEM_NOTRAP_P (m_tramp) = 1;
5103 /* The TRAMP argument should be the address of a field within the
5104 local function's FRAME decl. Let's see if we can fill in the
5105 to fill in the MEM_ATTRs for this memory. */
5106 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5107 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
/* Re-point the MEM at the rounded-up, properly aligned address.  */
5110 tmp = round_trampoline_addr (r_tramp);
5113 m_tramp = change_address (m_tramp, BLKmode, tmp);
5114 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5115 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5118 /* The FUNC argument should be the address of the nested function.
5119 Extract the actual function decl to pass to the hook. */
5120 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5121 t_func = TREE_OPERAND (t_func, 0);
5122 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5124 r_chain = expand_normal (t_chain);
5126 /* Generate insns to initialize the trampoline. */
5127 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
/* Record that this translation unit created trampolines (affects
   executable-stack markers on some targets).  */
5129 trampolines_created = 1;
/* Expand __builtin_adjust_trampoline: round the trampoline address to
   the required alignment and let the target apply any final
   adjustment (e.g. mode bits in the low address bits).  */
5134 expand_builtin_adjust_trampoline (tree exp)
5138 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5141 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5142 tramp = round_trampoline_addr (tramp);
5143 if (targetm.calls.trampoline_adjust_address)
5144 tramp = targetm.calls.trampoline_adjust_address (tramp);
5149 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5150 function. The function first checks whether the back end provides
5151 an insn to implement signbit for the respective mode. If not, it
5152 checks whether the floating point format of the value is such that
5153 the sign bit can be extracted. If that is not the case, the
5154 function returns NULL_RTX to indicate that a normal call should be
5155 emitted rather than expanding the function in-line. EXP is the
5156 expression that is a call to the builtin function; if convenient,
5157 the result should be placed in TARGET. */
5159 expand_builtin_signbit (tree exp, rtx target)
5161 const struct real_format *fmt;
5162 enum machine_mode fmode, imode, rmode;
5163 HOST_WIDE_INT hi, lo;
5166 enum insn_code icode;
5168 location_t loc = EXPR_LOCATION (exp);
5170 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5173 arg = CALL_EXPR_ARG (exp, 0);
5174 fmode = TYPE_MODE (TREE_TYPE (arg));
5175 rmode = TYPE_MODE (TREE_TYPE (exp));
5176 fmt = REAL_MODE_FORMAT (fmode);
5178 arg = builtin_save_expr (arg);
5180 /* Expand the argument yielding a RTX expression. */
5181 temp = expand_normal (arg);
5183 /* Check if the back end provides an insn that handles signbit for the
5185 icode = signbit_optab->handlers [(int) fmode].insn_code;
5186 if (icode != CODE_FOR_nothing)
5188 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5189 emit_unop_insn (icode, target, temp, UNKNOWN);
5193 /* For floating point formats without a sign bit, implement signbit
5195 bitpos = fmt->signbit_ro;
5198 /* But we can't do this if the format supports signed zero. */
5199 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
/* Sign-bit-less format: signbit(x) is just x < 0.  */
5202 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5203 build_real (TREE_TYPE (arg), dconst0));
5204 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5207 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
/* Word-or-smaller value: reinterpret the FP bits as an integer.  */
5209 imode = int_mode_for_mode (fmode);
5210 if (imode == BLKmode)
5212 temp = gen_lowpart (imode, temp);
5217 /* Handle targets with different FP word orders. */
5218 if (FLOAT_WORDS_BIG_ENDIAN)
5219 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5221 word = bitpos / BITS_PER_WORD;
5222 temp = operand_subword_force (temp, word, fmode);
5223 bitpos = bitpos % BITS_PER_WORD;
5226 /* Force the intermediate word_mode (or narrower) result into a
5227 register. This avoids attempting to create paradoxical SUBREGs
5228 of floating point modes below. */
5229 temp = force_reg (imode, temp);
5231 /* If the bitpos is within the "result mode" lowpart, the operation
5232 can be implement with a single bitwise AND. Otherwise, we need
5233 a right shift and an AND. */
5235 if (bitpos < GET_MODE_BITSIZE (rmode))
/* Build the double-word mask constant 1 << bitpos.  */
5237 if (bitpos < HOST_BITS_PER_WIDE_INT)
5240 lo = (HOST_WIDE_INT) 1 << bitpos;
5244 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5248 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5249 temp = gen_lowpart (rmode, temp);
5250 temp = expand_binop (rmode, and_optab, temp,
5251 immed_double_const (lo, hi, rmode),
5252 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5256 /* Perform a logical right shift to place the signbit in the least
5257 significant bit, then truncate the result to the desired mode
5258 and mask just this bit. */
5259 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5260 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5261 temp = gen_lowpart (rmode, temp);
5262 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5263 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5269 /* Expand fork or exec calls. TARGET is the desired target of the
5270 call. EXP is the call. FN is the
5271 identificator of the actual function. IGNORE is nonzero if the
5272 value is to be ignored. */
/* Under -fprofile-arcs, reroute fork/exec* through the __gcov_*
   wrappers so coverage counters are flushed across the fork/exec
   boundary; otherwise emit the plain call.  */
5275 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5280 /* If we are not profiling, just call the function. */
5281 if (!profile_arc_flag)
5284 /* Otherwise call the wrapper. This should be equivalent for the rest of
5285 compiler, so the code does not diverge, and the wrapper may run the
5286 code necessary for keeping the profiling sane. */
5288 switch (DECL_FUNCTION_CODE (fn))
5291 id = get_identifier ("__gcov_fork");
5294 case BUILT_IN_EXECL:
5295 id = get_identifier ("__gcov_execl");
5298 case BUILT_IN_EXECV:
5299 id = get_identifier ("__gcov_execv");
5302 case BUILT_IN_EXECLP:
5303 id = get_identifier ("__gcov_execlp");
5306 case BUILT_IN_EXECLE:
5307 id = get_identifier ("__gcov_execle");
5310 case BUILT_IN_EXECVP:
5311 id = get_identifier ("__gcov_execvp");
5314 case BUILT_IN_EXECVE:
5315 id = get_identifier ("__gcov_execve");
/* Synthesize an extern decl for the wrapper with the same type as
   the original function, then rebuild and expand the call.  */
5322 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5323 FUNCTION_DECL, id, TREE_TYPE (fn));
5324 DECL_EXTERNAL (decl) = 1;
5325 TREE_PUBLIC (decl) = 1;
5326 DECL_ARTIFICIAL (decl) = 1;
5327 TREE_NOTHROW (decl) = 1;
5328 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5329 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5330 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5331 return expand_call (call, target, ignore);
5336 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5337 the pointer in these functions is void*, the tree optimizers may remove
5338 casts. The mode computed in expand_builtin isn't reliable either, due
5339 to __sync_bool_compare_and_swap.
5341 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5342 group of builtins. This gives us log2 of the mode size. */
5344 static inline enum machine_mode
5345 get_builtin_sync_mode (int fcode_diff)
5347 /* The size is not negotiable, so ask not to get BLKmode in return
5348 if the target indicates that a smaller size would be better. */
/* 8 << fcode_diff bits: _1 -> QImode, _2 -> HImode, etc.  */
5349 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5352 /* Expand the memory expression LOC and return the appropriate memory operand
5353 for the builtin_sync operations. */
5356 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5360 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5361 addr = convert_memory_address (Pmode, addr);
5363 /* Note that we explicitly do not want any alias information for this
5364 memory, so that we kill all other live memories. Otherwise we don't
5365 satisfy the full barrier semantics of the intrinsic. */
5366 mem = validize_mem (gen_rtx_MEM (mode, addr));
5368 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
/* Barrier-crossing alias set plus volatile: prevents the optimizers
   from moving other memory accesses across the sync operation.  */
5369 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5370 MEM_VOLATILE_P (mem) = 1;
5375 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5376 EXP is the CALL_EXPR. CODE is the rtx code
5377 that corresponds to the arithmetic or logical operation from the name;
5378 an exception here is that NOT actually means NAND. TARGET is an optional
5379 place for us to store the results; AFTER is true if this is the
5380 fetch_and_xxx form. IGNORE is true if we don't actually care about
5381 the result of the operation at all. */
5384 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5385 enum rtx_code code, bool after,
5386 rtx target, bool ignore)
5389 enum machine_mode old_mode;
5390 location_t loc = EXPR_LOCATION (exp);
/* __sync_*nand* changed meaning in GCC 4.4 (from ~(a & b) stored vs
   fetched semantics); warn once per direction under -Wsync-nand.  */
5392 if (code == NOT && warn_sync_nand)
5394 tree fndecl = get_callee_fndecl (exp);
5395 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* One-shot flags: fetch_and_nand vs nand_and_fetch respectively.  */
5397 static bool warned_f_a_n, warned_n_a_f;
5401 case BUILT_IN_FETCH_AND_NAND_1:
5402 case BUILT_IN_FETCH_AND_NAND_2:
5403 case BUILT_IN_FETCH_AND_NAND_4:
5404 case BUILT_IN_FETCH_AND_NAND_8:
5405 case BUILT_IN_FETCH_AND_NAND_16:
5410 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5411 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5412 warned_f_a_n = true;
5415 case BUILT_IN_NAND_AND_FETCH_1:
5416 case BUILT_IN_NAND_AND_FETCH_2:
5417 case BUILT_IN_NAND_AND_FETCH_4:
5418 case BUILT_IN_NAND_AND_FETCH_8:
5419 case BUILT_IN_NAND_AND_FETCH_16:
5424 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5425 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5426 warned_n_a_f = true;
5434 /* Expand the operands. */
5435 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5437 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5438 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5439 of CONST_INTs, where we know the old_mode only from the call argument. */
5440 old_mode = GET_MODE (val);
5441 if (old_mode == VOIDmode)
5442 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5443 val = convert_modes (mode, old_mode, val, 1);
/* Result unused -> plain sync op; otherwise fetch-op/op-fetch per
   AFTER (guard line elided).  */
5446 return expand_sync_operation (mem, val, code);
5448 return expand_sync_fetch_operation (mem, val, code, after, target);
5451 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5452 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5453 true if this is the boolean form. TARGET is a place for us to store the
5454 results; this is NOT optional if IS_BOOL is true. */
5457 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5458 bool is_bool, rtx target)
5460 rtx old_val, new_val, mem;
5461 enum machine_mode old_mode;
5463 /* Expand the operands. */
5464 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5467 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5468 mode, EXPAND_NORMAL);
5469 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5470 of CONST_INTs, where we know the old_mode only from the call argument. */
5471 old_mode = GET_MODE (old_val);
5472 if (old_mode == VOIDmode)
5473 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5474 old_val = convert_modes (mode, old_mode, old_val, 1);
5476 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5477 mode, EXPAND_NORMAL);
5478 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5479 of CONST_INTs, where we know the old_mode only from the call argument. */
5480 old_mode = GET_MODE (new_val);
5481 if (old_mode == VOIDmode)
5482 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5483 new_val = convert_modes (mode, old_mode, new_val, 1);
/* Dispatch on IS_BOOL (guard line elided in this extract).  */
5486 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5488 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5491 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5492 general form is actually an atomic exchange, and some targets only
5493 support a reduced form with the second argument being a constant 1.
5494 EXP is the CALL_EXPR; TARGET is an optional place for us to store
/* NOTE(review): elided listing -- return type, local declarations of
   MEM/VAL, and braces are missing from this view.  */
5498 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5502 enum machine_mode old_mode;
5504 /* Expand the operands. */
5505 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5506 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5507 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5508 of CONST_INTs, where we know the old_mode only from the call argument. */
5509 old_mode = GET_MODE (val);
5510 if (old_mode == VOIDmode)
5511 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5512 val = convert_modes (mode, old_mode, val, 1);
/* Emit the atomic exchange; returns the previous memory contents.  */
5514 return expand_sync_lock_test_and_set (mem, val, target);
5517 /* Expand the __sync_synchronize intrinsic. */
/* Emits a full memory barrier, trying three strategies in order:
   (1) the target's memory_barrier insn, (2) a __sync_synchronize
   library call, (3) a volatile asm with a "memory" clobber.  */
5520 expand_builtin_synchronize (void)
5523 VEC (tree, gc) *v_clobbers;
5525 #ifdef HAVE_memory_barrier
5526 if (HAVE_memory_barrier)
5528 emit_insn (gen_memory_barrier ());
/* NOTE(review): the early returns after each strategy are elided from
   this listing.  */
5533 if (synchronize_libfunc != NULL_RTX)
5535 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5539 /* If no explicit memory barrier instruction is available, create an
5540 empty asm stmt with a memory clobber. */
5541 v_clobbers = VEC_alloc (tree, gc, 1);
5542 VEC_quick_push (tree, v_clobbers,
5543 tree_cons (NULL, build_string (6, "memory"), NULL))

5544 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5545 gimple_asm_set_volatile (x, true);
5546 expand_asm_stmt (x);
5549 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
/* Stores zero to the lock with release semantics: either via the
   target's sync_lock_release pattern or via a barrier + plain store.  */
5552 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5554 enum insn_code icode;
5556 rtx val = const0_rtx;
5558 /* Expand the operands. */
5559 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5561 /* If there is an explicit operation in the md file, use it. */
5562 icode = sync_lock_release[mode];
5563 if (icode != CODE_FOR_nothing)
5565 if (!insn_data[icode].operand[1].predicate (val, mode))
5566 val = force_reg (mode, val);
5568 insn = GEN_FCN (icode) (mem, val);
/* NOTE(review): the emit/return of INSN on success is elided from this
   listing; on failure control falls through to the fallback below.  */
5576 /* Otherwise we can implement this operation by emitting a barrier
5577 followed by a store of zero. */
5578 expand_builtin_synchronize ();
5579 emit_move_insn (mem, val);
5582 /* Expand an expression EXP that calls a built-in function,
5583 with result going to TARGET if that's convenient
5584 (and in mode MODE if that's convenient).
5585 SUBTARGET may be used as the target for computing one of EXP's operands.
5586 IGNORE is nonzero if the value is to be ignored. */
/* NOTE(review): this listing is heavily elided -- the "switch (fcode)"
   statement itself, most "break;" lines, and several case labels are
   missing between the numbered lines.  Each handled case either expands
   inline (falling through to the library call at the bottom if the
   expander returned 0) or returns directly.  */
5589 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5592 tree fndecl = get_callee_fndecl (exp);
5593 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5594 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
/* Machine-specific builtins are delegated entirely to the target hook.  */
5596 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5597 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5599 /* When not optimizing, generate calls to library functions for a certain
5602 && !called_as_built_in (fndecl)
5603 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5604 && fcode != BUILT_IN_ALLOCA
5605 && fcode != BUILT_IN_FREE)
5606 return expand_call (exp, target, ignore);
5608 /* The built-in function expanders test for target == const0_rtx
5609 to determine whether the function's result will be ignored. */
5611 target = const0_rtx;
5613 /* If the result of a pure or const built-in function is ignored, and
5614 none of its arguments are volatile, we can avoid expanding the
5615 built-in call and just evaluate the arguments for side-effects. */
5616 if (target == const0_rtx
5617 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5619 bool volatilep = false;
5621 call_expr_arg_iterator iter;
5623 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5624 if (TREE_THIS_VOLATILE (arg))
5632 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5633 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Math builtins: each expander may return 0, in which case control
   drops to the generic library call at the end of the function.  */
5640 CASE_FLT_FN (BUILT_IN_FABS):
5641 target = expand_builtin_fabs (exp, target, subtarget);
5646 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5647 target = expand_builtin_copysign (exp, target, subtarget);
5652 /* Just do a normal library call if we were unable to fold
5654 CASE_FLT_FN (BUILT_IN_CABS):
5657 CASE_FLT_FN (BUILT_IN_EXP):
5658 CASE_FLT_FN (BUILT_IN_EXP10):
5659 CASE_FLT_FN (BUILT_IN_POW10):
5660 CASE_FLT_FN (BUILT_IN_EXP2):
5661 CASE_FLT_FN (BUILT_IN_EXPM1):
5662 CASE_FLT_FN (BUILT_IN_LOGB):
5663 CASE_FLT_FN (BUILT_IN_LOG):
5664 CASE_FLT_FN (BUILT_IN_LOG10):
5665 CASE_FLT_FN (BUILT_IN_LOG2):
5666 CASE_FLT_FN (BUILT_IN_LOG1P):
5667 CASE_FLT_FN (BUILT_IN_TAN):
5668 CASE_FLT_FN (BUILT_IN_ASIN):
5669 CASE_FLT_FN (BUILT_IN_ACOS):
5670 CASE_FLT_FN (BUILT_IN_ATAN):
5671 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5672 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5673 because of possible accuracy problems. */
5674 if (! flag_unsafe_math_optimizations)
5676 CASE_FLT_FN (BUILT_IN_SQRT):
5677 CASE_FLT_FN (BUILT_IN_FLOOR):
5678 CASE_FLT_FN (BUILT_IN_CEIL):
5679 CASE_FLT_FN (BUILT_IN_TRUNC):
5680 CASE_FLT_FN (BUILT_IN_ROUND):
5681 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5682 CASE_FLT_FN (BUILT_IN_RINT):
5683 target = expand_builtin_mathfn (exp, target, subtarget);
5688 CASE_FLT_FN (BUILT_IN_ILOGB):
5689 if (! flag_unsafe_math_optimizations)
5691 CASE_FLT_FN (BUILT_IN_ISINF):
5692 CASE_FLT_FN (BUILT_IN_FINITE):
5693 case BUILT_IN_ISFINITE:
5694 case BUILT_IN_ISNORMAL:
5695 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5700 CASE_FLT_FN (BUILT_IN_LCEIL):
5701 CASE_FLT_FN (BUILT_IN_LLCEIL):
5702 CASE_FLT_FN (BUILT_IN_LFLOOR):
5703 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5704 target = expand_builtin_int_roundingfn (exp, target);
5709 CASE_FLT_FN (BUILT_IN_LRINT):
5710 CASE_FLT_FN (BUILT_IN_LLRINT):
5711 CASE_FLT_FN (BUILT_IN_LROUND):
5712 CASE_FLT_FN (BUILT_IN_LLROUND):
5713 target = expand_builtin_int_roundingfn_2 (exp, target);
5718 CASE_FLT_FN (BUILT_IN_POW):
5719 target = expand_builtin_pow (exp, target, subtarget);
5724 CASE_FLT_FN (BUILT_IN_POWI):
5725 target = expand_builtin_powi (exp, target, subtarget);
5730 CASE_FLT_FN (BUILT_IN_ATAN2):
5731 CASE_FLT_FN (BUILT_IN_LDEXP):
5732 CASE_FLT_FN (BUILT_IN_SCALB):
5733 CASE_FLT_FN (BUILT_IN_SCALBN):
5734 CASE_FLT_FN (BUILT_IN_SCALBLN):
5735 if (! flag_unsafe_math_optimizations)
5738 CASE_FLT_FN (BUILT_IN_FMOD):
5739 CASE_FLT_FN (BUILT_IN_REMAINDER):
5740 CASE_FLT_FN (BUILT_IN_DREM):
5741 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5746 CASE_FLT_FN (BUILT_IN_CEXPI):
5747 target = expand_builtin_cexpi (exp, target, subtarget);
5748 gcc_assert (target);
5751 CASE_FLT_FN (BUILT_IN_SIN):
5752 CASE_FLT_FN (BUILT_IN_COS):
5753 if (! flag_unsafe_math_optimizations)
5755 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5760 CASE_FLT_FN (BUILT_IN_SINCOS):
5761 if (! flag_unsafe_math_optimizations)
5763 target = expand_builtin_sincos (exp);
/* Non-local control-flow and stack-introspection builtins.  */
5768 case BUILT_IN_APPLY_ARGS:
5769 return expand_builtin_apply_args ();
5771 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5772 FUNCTION with a copy of the parameters described by
5773 ARGUMENTS, and ARGSIZE. It returns a block of memory
5774 allocated on the stack into which is stored all the registers
5775 that might possibly be used for returning the result of a
5776 function. ARGUMENTS is the value returned by
5777 __builtin_apply_args. ARGSIZE is the number of bytes of
5778 arguments that must be copied. ??? How should this value be
5779 computed? We'll also need a safe worst case value for varargs
5781 case BUILT_IN_APPLY:
5782 if (!validate_arglist (exp, POINTER_TYPE,
5783 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5784 && !validate_arglist (exp, REFERENCE_TYPE,
5785 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5791 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5792 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5793 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5795 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5798 /* __builtin_return (RESULT) causes the function to return the
5799 value described by RESULT. RESULT is address of the block of
5800 memory returned by __builtin_apply. */
5801 case BUILT_IN_RETURN:
5802 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5803 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5806 case BUILT_IN_SAVEREGS:
5807 return expand_builtin_saveregs ();
5809 case BUILT_IN_ARGS_INFO:
5810 return expand_builtin_args_info (exp);
5812 case BUILT_IN_VA_ARG_PACK:
5813 /* All valid uses of __builtin_va_arg_pack () are removed during
5815 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5818 case BUILT_IN_VA_ARG_PACK_LEN:
5819 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5821 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5824 /* Return the address of the first anonymous stack arg. */
5825 case BUILT_IN_NEXT_ARG:
5826 if (fold_builtin_next_arg (exp, false))
5828 return expand_builtin_next_arg ();
5830 case BUILT_IN_CLEAR_CACHE:
5831 target = expand_builtin___clear_cache (exp);
5836 case BUILT_IN_CLASSIFY_TYPE:
5837 return expand_builtin_classify_type (exp);
5839 case BUILT_IN_CONSTANT_P:
5842 case BUILT_IN_FRAME_ADDRESS:
5843 case BUILT_IN_RETURN_ADDRESS:
5844 return expand_builtin_frame_address (fndecl, exp);
5846 /* Returns the address of the area where the structure is returned.
5848 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5849 if (call_expr_nargs (exp) != 0
5850 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5851 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5854 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5856 case BUILT_IN_ALLOCA:
5857 target = expand_builtin_alloca (exp, target);
5862 case BUILT_IN_STACK_SAVE:
5863 return expand_stack_save ();
5865 case BUILT_IN_STACK_RESTORE:
5866 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
/* Bit-manipulation builtins expanded through optabs.  */
5869 case BUILT_IN_BSWAP32:
5870 case BUILT_IN_BSWAP64:
5871 target = expand_builtin_bswap (exp, target, subtarget);
5877 CASE_INT_FN (BUILT_IN_FFS):
5878 case BUILT_IN_FFSIMAX:
5879 target = expand_builtin_unop (target_mode, exp, target,
5880 subtarget, ffs_optab);
5885 CASE_INT_FN (BUILT_IN_CLZ):
5886 case BUILT_IN_CLZIMAX:
5887 target = expand_builtin_unop (target_mode, exp, target,
5888 subtarget, clz_optab);
5893 CASE_INT_FN (BUILT_IN_CTZ):
5894 case BUILT_IN_CTZIMAX:
5895 target = expand_builtin_unop (target_mode, exp, target,
5896 subtarget, ctz_optab);
5901 CASE_INT_FN (BUILT_IN_POPCOUNT):
5902 case BUILT_IN_POPCOUNTIMAX:
5903 target = expand_builtin_unop (target_mode, exp, target,
5904 subtarget, popcount_optab);
5909 CASE_INT_FN (BUILT_IN_PARITY):
5910 case BUILT_IN_PARITYIMAX:
5911 target = expand_builtin_unop (target_mode, exp, target,
5912 subtarget, parity_optab);
/* String and memory builtins.  */
5917 case BUILT_IN_STRLEN:
5918 target = expand_builtin_strlen (exp, target, target_mode);
5923 case BUILT_IN_STRCPY:
5924 target = expand_builtin_strcpy (exp, target);
5929 case BUILT_IN_STRNCPY:
5930 target = expand_builtin_strncpy (exp, target);
5935 case BUILT_IN_STPCPY:
5936 target = expand_builtin_stpcpy (exp, target, mode);
5941 case BUILT_IN_MEMCPY:
5942 target = expand_builtin_memcpy (exp, target);
5947 case BUILT_IN_MEMPCPY:
5948 target = expand_builtin_mempcpy (exp, target, mode);
5953 case BUILT_IN_MEMSET:
5954 target = expand_builtin_memset (exp, target, mode);
5959 case BUILT_IN_BZERO:
5960 target = expand_builtin_bzero (exp);
5965 case BUILT_IN_STRCMP:
5966 target = expand_builtin_strcmp (exp, target);
5971 case BUILT_IN_STRNCMP:
5972 target = expand_builtin_strncmp (exp, target, mode);
5978 case BUILT_IN_MEMCMP:
5979 target = expand_builtin_memcmp (exp, target, mode);
/* setjmp/longjmp family; BUILT_IN_SETJMP proper is lowered earlier.  */
5984 case BUILT_IN_SETJMP:
5985 /* This should have been lowered to the builtins below. */
5988 case BUILT_IN_SETJMP_SETUP:
5989 /* __builtin_setjmp_setup is passed a pointer to an array of five words
5990 and the receiver label. */
5991 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5993 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5994 VOIDmode, EXPAND_NORMAL);
5995 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
5996 rtx label_r = label_rtx (label);
5998 /* This is copied from the handling of non-local gotos. */
5999 expand_builtin_setjmp_setup (buf_addr, label_r);
6000 nonlocal_goto_handler_labels
6001 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6002 nonlocal_goto_handler_labels);
6003 /* ??? Do not let expand_label treat us as such since we would
6004 not want to be both on the list of non-local labels and on
6005 the list of forced labels. */
6006 FORCED_LABEL (label) = 0;
6011 case BUILT_IN_SETJMP_DISPATCHER:
6012 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6013 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6015 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6016 rtx label_r = label_rtx (label);
6018 /* Remove the dispatcher label from the list of non-local labels
6019 since the receiver labels have been added to it above. */
6020 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6025 case BUILT_IN_SETJMP_RECEIVER:
6026 /* __builtin_setjmp_receiver is passed the receiver label. */
6027 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6029 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6030 rtx label_r = label_rtx (label);
6032 expand_builtin_setjmp_receiver (label_r);
6037 /* __builtin_longjmp is passed a pointer to an array of five words.
6038 It's similar to the C library longjmp function but works with
6039 __builtin_setjmp above. */
6040 case BUILT_IN_LONGJMP:
6041 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6043 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6044 VOIDmode, EXPAND_NORMAL);
6045 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6047 if (value != const1_rtx)
6049 error ("%<__builtin_longjmp%> second argument must be 1");
6053 expand_builtin_longjmp (buf_addr, value);
6058 case BUILT_IN_NONLOCAL_GOTO:
6059 target = expand_builtin_nonlocal_goto (exp);
6064 /* This updates the setjmp buffer that is its argument with the value
6065 of the current stack pointer. */
6066 case BUILT_IN_UPDATE_SETJMP_BUF:
6067 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6070 = expand_normal (CALL_EXPR_ARG (exp, 0));
6072 expand_builtin_update_setjmp_buf (buf_addr);
6078 expand_builtin_trap ();
6081 case BUILT_IN_UNREACHABLE:
6082 expand_builtin_unreachable ();
6085 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6086 case BUILT_IN_SIGNBITD32:
6087 case BUILT_IN_SIGNBITD64:
6088 case BUILT_IN_SIGNBITD128:
6089 target = expand_builtin_signbit (exp, target);
6094 /* Various hooks for the DWARF 2 __throw routine. */
6095 case BUILT_IN_UNWIND_INIT:
6096 expand_builtin_unwind_init ();
6098 case BUILT_IN_DWARF_CFA:
6099 return virtual_cfa_rtx;
6100 #ifdef DWARF2_UNWIND_INFO
6101 case BUILT_IN_DWARF_SP_COLUMN:
6102 return expand_builtin_dwarf_sp_column ();
6103 case BUILT_IN_INIT_DWARF_REG_SIZES:
6104 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6107 case BUILT_IN_FROB_RETURN_ADDR:
6108 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6109 case BUILT_IN_EXTRACT_RETURN_ADDR:
6110 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6111 case BUILT_IN_EH_RETURN:
6112 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6113 CALL_EXPR_ARG (exp, 1));
6115 #ifdef EH_RETURN_DATA_REGNO
6116 case BUILT_IN_EH_RETURN_DATA_REGNO:
6117 return expand_builtin_eh_return_data_regno (exp);
6119 case BUILT_IN_EXTEND_POINTER:
6120 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6121 case BUILT_IN_EH_POINTER:
6122 return expand_builtin_eh_pointer (exp);
6123 case BUILT_IN_EH_FILTER:
6124 return expand_builtin_eh_filter (exp);
6125 case BUILT_IN_EH_COPY_VALUES:
6126 return expand_builtin_eh_copy_values (exp);
6128 case BUILT_IN_VA_START:
6129 return expand_builtin_va_start (exp);
6130 case BUILT_IN_VA_END:
6131 return expand_builtin_va_end (exp);
6132 case BUILT_IN_VA_COPY:
6133 return expand_builtin_va_copy (exp);
6134 case BUILT_IN_EXPECT:
6135 return expand_builtin_expect (exp, target);
6136 case BUILT_IN_PREFETCH:
6137 expand_builtin_prefetch (exp);
6140 case BUILT_IN_PROFILE_FUNC_ENTER:
6141 return expand_builtin_profile_func (false);
6142 case BUILT_IN_PROFILE_FUNC_EXIT:
6143 return expand_builtin_profile_func (true);
6145 case BUILT_IN_INIT_TRAMPOLINE:
6146 return expand_builtin_init_trampoline (exp);
6147 case BUILT_IN_ADJUST_TRAMPOLINE:
6148 return expand_builtin_adjust_trampoline (exp);
6151 case BUILT_IN_EXECL:
6152 case BUILT_IN_EXECV:
6153 case BUILT_IN_EXECLP:
6154 case BUILT_IN_EXECLE:
6155 case BUILT_IN_EXECVP:
6156 case BUILT_IN_EXECVE:
6157 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
/* __sync_* atomic builtins.  The _1.._16 suffix encodes the operand
   size in bytes; get_builtin_sync_mode maps it to a machine mode.
   The bool flag to expand_builtin_sync_operation selects
   fetch-and-op (false) vs. op-and-fetch (true).  */
6162 case BUILT_IN_FETCH_AND_ADD_1:
6163 case BUILT_IN_FETCH_AND_ADD_2:
6164 case BUILT_IN_FETCH_AND_ADD_4:
6165 case BUILT_IN_FETCH_AND_ADD_8:
6166 case BUILT_IN_FETCH_AND_ADD_16:
6167 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6168 target = expand_builtin_sync_operation (mode, exp, PLUS,
6169 false, target, ignore);
6174 case BUILT_IN_FETCH_AND_SUB_1:
6175 case BUILT_IN_FETCH_AND_SUB_2:
6176 case BUILT_IN_FETCH_AND_SUB_4:
6177 case BUILT_IN_FETCH_AND_SUB_8:
6178 case BUILT_IN_FETCH_AND_SUB_16:
6179 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6180 target = expand_builtin_sync_operation (mode, exp, MINUS,
6181 false, target, ignore);
6186 case BUILT_IN_FETCH_AND_OR_1:
6187 case BUILT_IN_FETCH_AND_OR_2:
6188 case BUILT_IN_FETCH_AND_OR_4:
6189 case BUILT_IN_FETCH_AND_OR_8:
6190 case BUILT_IN_FETCH_AND_OR_16:
6191 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6192 target = expand_builtin_sync_operation (mode, exp, IOR,
6193 false, target, ignore);
6198 case BUILT_IN_FETCH_AND_AND_1:
6199 case BUILT_IN_FETCH_AND_AND_2:
6200 case BUILT_IN_FETCH_AND_AND_4:
6201 case BUILT_IN_FETCH_AND_AND_8:
6202 case BUILT_IN_FETCH_AND_AND_16:
6203 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6204 target = expand_builtin_sync_operation (mode, exp, AND,
6205 false, target, ignore);
6210 case BUILT_IN_FETCH_AND_XOR_1:
6211 case BUILT_IN_FETCH_AND_XOR_2:
6212 case BUILT_IN_FETCH_AND_XOR_4:
6213 case BUILT_IN_FETCH_AND_XOR_8:
6214 case BUILT_IN_FETCH_AND_XOR_16:
6215 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6216 target = expand_builtin_sync_operation (mode, exp, XOR,
6217 false, target, ignore);
6222 case BUILT_IN_FETCH_AND_NAND_1:
6223 case BUILT_IN_FETCH_AND_NAND_2:
6224 case BUILT_IN_FETCH_AND_NAND_4:
6225 case BUILT_IN_FETCH_AND_NAND_8:
6226 case BUILT_IN_FETCH_AND_NAND_16:
6227 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6228 target = expand_builtin_sync_operation (mode, exp, NOT,
6229 false, target, ignore);
6234 case BUILT_IN_ADD_AND_FETCH_1:
6235 case BUILT_IN_ADD_AND_FETCH_2:
6236 case BUILT_IN_ADD_AND_FETCH_4:
6237 case BUILT_IN_ADD_AND_FETCH_8:
6238 case BUILT_IN_ADD_AND_FETCH_16:
6239 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6240 target = expand_builtin_sync_operation (mode, exp, PLUS,
6241 true, target, ignore);
6246 case BUILT_IN_SUB_AND_FETCH_1:
6247 case BUILT_IN_SUB_AND_FETCH_2:
6248 case BUILT_IN_SUB_AND_FETCH_4:
6249 case BUILT_IN_SUB_AND_FETCH_8:
6250 case BUILT_IN_SUB_AND_FETCH_16:
6251 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6252 target = expand_builtin_sync_operation (mode, exp, MINUS,
6253 true, target, ignore);
6258 case BUILT_IN_OR_AND_FETCH_1:
6259 case BUILT_IN_OR_AND_FETCH_2:
6260 case BUILT_IN_OR_AND_FETCH_4:
6261 case BUILT_IN_OR_AND_FETCH_8:
6262 case BUILT_IN_OR_AND_FETCH_16:
6263 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6264 target = expand_builtin_sync_operation (mode, exp, IOR,
6265 true, target, ignore);
6270 case BUILT_IN_AND_AND_FETCH_1:
6271 case BUILT_IN_AND_AND_FETCH_2:
6272 case BUILT_IN_AND_AND_FETCH_4:
6273 case BUILT_IN_AND_AND_FETCH_8:
6274 case BUILT_IN_AND_AND_FETCH_16:
6275 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6276 target = expand_builtin_sync_operation (mode, exp, AND,
6277 true, target, ignore);
6282 case BUILT_IN_XOR_AND_FETCH_1:
6283 case BUILT_IN_XOR_AND_FETCH_2:
6284 case BUILT_IN_XOR_AND_FETCH_4:
6285 case BUILT_IN_XOR_AND_FETCH_8:
6286 case BUILT_IN_XOR_AND_FETCH_16:
6287 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6288 target = expand_builtin_sync_operation (mode, exp, XOR,
6289 true, target, ignore);
6294 case BUILT_IN_NAND_AND_FETCH_1:
6295 case BUILT_IN_NAND_AND_FETCH_2:
6296 case BUILT_IN_NAND_AND_FETCH_4:
6297 case BUILT_IN_NAND_AND_FETCH_8:
6298 case BUILT_IN_NAND_AND_FETCH_16:
6299 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6300 target = expand_builtin_sync_operation (mode, exp, NOT,
6301 true, target, ignore);
6306 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6307 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6308 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6309 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6310 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6311 if (mode == VOIDmode)
6312 mode = TYPE_MODE (boolean_type_node);
6313 if (!target || !register_operand (target, mode))
6314 target = gen_reg_rtx (mode);
6316 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6317 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6322 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6323 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6324 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6325 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6326 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6327 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6328 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6333 case BUILT_IN_LOCK_TEST_AND_SET_1:
6334 case BUILT_IN_LOCK_TEST_AND_SET_2:
6335 case BUILT_IN_LOCK_TEST_AND_SET_4:
6336 case BUILT_IN_LOCK_TEST_AND_SET_8:
6337 case BUILT_IN_LOCK_TEST_AND_SET_16:
6338 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6339 target = expand_builtin_lock_test_and_set (mode, exp, target);
6344 case BUILT_IN_LOCK_RELEASE_1:
6345 case BUILT_IN_LOCK_RELEASE_2:
6346 case BUILT_IN_LOCK_RELEASE_4:
6347 case BUILT_IN_LOCK_RELEASE_8:
6348 case BUILT_IN_LOCK_RELEASE_16:
6349 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6350 expand_builtin_lock_release (mode, exp);
6353 case BUILT_IN_SYNCHRONIZE:
6354 expand_builtin_synchronize ();
6357 case BUILT_IN_OBJECT_SIZE:
6358 return expand_builtin_object_size (exp);
/* _FORTIFY_SOURCE (_chk) variants: expand or emit overflow warnings.  */
6360 case BUILT_IN_MEMCPY_CHK:
6361 case BUILT_IN_MEMPCPY_CHK:
6362 case BUILT_IN_MEMMOVE_CHK:
6363 case BUILT_IN_MEMSET_CHK:
6364 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6369 case BUILT_IN_STRCPY_CHK:
6370 case BUILT_IN_STPCPY_CHK:
6371 case BUILT_IN_STRNCPY_CHK:
6372 case BUILT_IN_STRCAT_CHK:
6373 case BUILT_IN_STRNCAT_CHK:
6374 case BUILT_IN_SNPRINTF_CHK:
6375 case BUILT_IN_VSNPRINTF_CHK:
6376 maybe_emit_chk_warning (exp, fcode);
6379 case BUILT_IN_SPRINTF_CHK:
6380 case BUILT_IN_VSPRINTF_CHK:
6381 maybe_emit_sprintf_chk_warning (exp, fcode);
6385 maybe_emit_free_warning (exp);
6388 default: /* just do library call, if unknown builtin */
6392 /* The switch statement above can drop through to cause the function
6393 to be called normally. */
6394 return expand_call (exp, target, ignore);
6397 /* Determine whether a tree node represents a call to a built-in
6398 function. If the tree T is a call to a built-in function with
6399 the right number of arguments of the appropriate types, return
6400 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6401 Otherwise the return value is END_BUILTINS. */
6403 enum built_in_function
6404 builtin_mathfn_code (const_tree t)
6406 const_tree fndecl, arg, parmlist;
6407 const_tree argtype, parmtype;
6408 const_call_expr_arg_iterator iter;
/* Only direct calls (ADDR_EXPR callee) can be recognized.  */
6410 if (TREE_CODE (t) != CALL_EXPR
6411 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6412 return END_BUILTINS;
6414 fndecl = get_callee_fndecl (t);
6415 if (fndecl == NULL_TREE
6416 || TREE_CODE (fndecl) != FUNCTION_DECL
6417 || ! DECL_BUILT_IN (fndecl)
6418 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6419 return END_BUILTINS;
/* Walk the formal parameter list and the actual arguments in lockstep,
   requiring each argument's type class to match the parameter's.  */
6421 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6422 init_const_call_expr_arg_iterator (t, &iter);
6423 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6425 /* If a function doesn't take a variable number of arguments,
6426 the last element in the list will have type `void'. */
6427 parmtype = TREE_VALUE (parmlist);
6428 if (VOID_TYPE_P (parmtype))
6430 if (more_const_call_expr_args_p (&iter))
6431 return END_BUILTINS;
6432 return DECL_FUNCTION_CODE (fndecl);
6435 if (! more_const_call_expr_args_p (&iter))
6436 return END_BUILTINS;
6438 arg = next_const_call_expr_arg (&iter);
6439 argtype = TREE_TYPE (arg);
6441 if (SCALAR_FLOAT_TYPE_P (parmtype))
6443 if (! SCALAR_FLOAT_TYPE_P (argtype))
6444 return END_BUILTINS;
6446 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6448 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6449 return END_BUILTINS;
6451 else if (POINTER_TYPE_P (parmtype))
6453 if (! POINTER_TYPE_P (argtype))
6454 return END_BUILTINS;
6456 else if (INTEGRAL_TYPE_P (parmtype))
6458 if (! INTEGRAL_TYPE_P (argtype))
6459 return END_BUILTINS;
/* Any other parameter type class is not recognized.  */
6462 return END_BUILTINS;
6465 /* Variable-length argument list. */
6466 return DECL_FUNCTION_CODE (fndecl);
6469 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6470 evaluate to a constant. */
/* Returns integer_one_node if ARG is provably constant, integer_zero_node
   if it provably is not (or no more folding will happen), and -- per the
   elided tail of this function -- otherwise defers the decision.  */
6473 fold_builtin_constant_p (tree arg)
6475 /* We return 1 for a numeric type that's known to be a constant
6476 value at compile-time or for an aggregate type that's a
6477 literal constant. */
6480 /* If we know this is a constant, emit the constant of one. */
6481 if (CONSTANT_CLASS_P (arg)
6482 || (TREE_CODE (arg) == CONSTRUCTOR
6483 && TREE_CONSTANT (arg)))
6484 return integer_one_node;
6485 if (TREE_CODE (arg) == ADDR_EXPR)
6487 tree op = TREE_OPERAND (arg, 0);
/* The address of a string literal (or of its element 0) is constant.  */
6488 if (TREE_CODE (op) == STRING_CST
6489 || (TREE_CODE (op) == ARRAY_REF
6490 && integer_zerop (TREE_OPERAND (op, 1))
6491 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6492 return integer_one_node;
6495 /* If this expression has side effects, show we don't know it to be a
6496 constant. Likewise if it's a pointer or aggregate type since in
6497 those case we only want literals, since those are only optimized
6498 when generating RTL, not later.
6499 And finally, if we are compiling an initializer, not code, we
6500 need to return a definite result now; there's not going to be any
6501 more optimization done. */
6502 if (TREE_SIDE_EFFECTS (arg)
6503 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6504 || POINTER_TYPE_P (TREE_TYPE (arg))
6506 || folding_initializer)
6507 return integer_zero_node;
6512 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6513 return it as a truthvalue. */
/* Builds __builtin_expect (PRED, EXPECTED) != 0, converting both
   arguments to the builtin's declared parameter types first.  */
6516 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6518 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6520 fn = built_in_decls[BUILT_IN_EXPECT];
6521 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6522 ret_type = TREE_TYPE (TREE_TYPE (fn));
6523 pred_type = TREE_VALUE (arg_types);
6524 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6526 pred = fold_convert_loc (loc, pred_type, pred);
6527 expected = fold_convert_loc (loc, expected_type, expected);
6528 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
/* Compare the call's result against 0 so the whole expression is a
   truthvalue of PRED's type.  */
6530 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6531 build_int_cst (ret_type, 0));
6534 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6535 NULL_TREE if no simplification is possible. */
6538 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6541 enum tree_code code;
6543 /* If this is a builtin_expect within a builtin_expect keep the
6544 inner one. See through a comparison against a constant. It
6545 might have been added to create a thruthvalue. */
6547 if (COMPARISON_CLASS_P (inner)
6548 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6549 inner = TREE_OPERAND (inner, 0);
6551 if (TREE_CODE (inner) == CALL_EXPR
6552 && (fndecl = get_callee_fndecl (inner))
6553 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6554 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6557 /* Distribute the expected value over short-circuiting operators.
6558 See through the cast from truthvalue_type_node to long. */
6560 while (TREE_CODE (inner) == NOP_EXPR
6561 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6562 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6563 inner = TREE_OPERAND (inner, 0);
6565 code = TREE_CODE (inner);
6566 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6568 tree op0 = TREE_OPERAND (inner, 0);
6569 tree op1 = TREE_OPERAND (inner, 1);
/* Rebuild A && B (or A || B) as expect(A) && expect(B) so the hint
   reaches both operands' branches.  */
6571 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6572 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6573 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6575 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6578 /* If the argument isn't invariant then there's nothing else we can do. */
6579 if (!TREE_CONSTANT (arg0))
6582 /* If we expect that a comparison against the argument will fold to
6583 a constant return the constant. In practice, this means a true
6584 constant or the address of a non-weak symbol. */
6587 if (TREE_CODE (inner) == ADDR_EXPR)
/* Strip component/array refs to reach the underlying decl; a weak
   symbol's address is not a usable compile-time constant.  */
6591 inner = TREE_OPERAND (inner, 0);
6593 while (TREE_CODE (inner) == COMPONENT_REF
6594 || TREE_CODE (inner) == ARRAY_REF);
6595 if ((TREE_CODE (inner) == VAR_DECL
6596 || TREE_CODE (inner) == FUNCTION_DECL)
6597 && DECL_WEAK (inner))
6601 /* Otherwise, ARG0 already has the proper type for the return value. */
6605 /* Fold a call to __builtin_classify_type with argument ARG. */
/* Returns no_type_class when ARG is absent (condition elided from this
   listing), else the type class of ARG's type as an INTEGER_CST.  */
6608 fold_builtin_classify_type (tree arg)
6611 return build_int_cst (NULL_TREE, no_type_class);
6613 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6616 /* Fold a call to __builtin_strlen with argument ARG. */
/* Folds to a constant when c_strlen can compute the length; the
   NULL_TREE fallthrough paths are elided from this listing.  */
6619 fold_builtin_strlen (location_t loc, tree arg)
6621 if (!validate_arg (arg, POINTER_TYPE))
6625 tree len = c_strlen (arg, 0);
6629 /* Convert from the internal "sizetype" type to "size_t". */
6631 len = fold_convert_loc (loc, size_type_node, len);
6639 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6642 fold_builtin_inf (location_t loc, tree type, int warn)
6644 REAL_VALUE_TYPE real;
6646 /* __builtin_inff is intended to be usable to define INFINITY on all
6647 targets. If an infinity is not available, INFINITY expands "to a
6648 positive constant of type float that overflows at translation
6649 time", footnote "In this case, using INFINITY will violate the
6650 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6651 Thus we pedwarn to ensure this constraint violation is
6653 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6654 pedwarn (loc, 0, "target format does not support infinity");
/* NOTE(review): the real_inf (&real) initialization line is elided
   from this listing.  */
6657 return build_real (type, real);
6660 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
/* ARG is the NaN-payload string; QUIET selects quiet vs. signaling NaN.
   The NULL_TREE failure returns are elided from this listing.  */
6663 fold_builtin_nan (tree arg, tree type, int quiet)
6665 REAL_VALUE_TYPE real;
6668 if (!validate_arg (arg, POINTER_TYPE))
6670 str = c_getstr (arg);
6674 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6677 return build_real (type, real);
6680 /* Return true if the floating point expression T has an integer value.
6681 We also allow +Inf, -Inf and NaN to be considered integer values. */
/* NOTE(review): the case labels of this switch are elided from this
   listing; only the recursive checks on the operands are visible.  */
6684 integer_valued_real_p (tree t)
6686 switch (TREE_CODE (t))
6693 return integer_valued_real_p (TREE_OPERAND (t, 0));
6698 return integer_valued_real_p (TREE_OPERAND (t, 1));
/* Binary arithmetic is integer-valued when both operands are.  */
6705 return integer_valued_real_p (TREE_OPERAND (t, 0))
6706 && integer_valued_real_p (TREE_OPERAND (t, 1));
/* Conditional: both selected arms must be integer-valued.  */
6709 return integer_valued_real_p (TREE_OPERAND (t, 1))
6710 && integer_valued_real_p (TREE_OPERAND (t, 2));
6713 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6717 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6718 if (TREE_CODE (type) == INTEGER_TYPE)
6720 if (TREE_CODE (type) == REAL_TYPE)
6721 return integer_valued_real_p (TREE_OPERAND (t, 0));
/* Calls to rounding builtins always produce integer values.  */
6726 switch (builtin_mathfn_code (t))
6728 CASE_FLT_FN (BUILT_IN_CEIL):
6729 CASE_FLT_FN (BUILT_IN_FLOOR):
6730 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6731 CASE_FLT_FN (BUILT_IN_RINT):
6732 CASE_FLT_FN (BUILT_IN_ROUND):
6733 CASE_FLT_FN (BUILT_IN_TRUNC):
6736 CASE_FLT_FN (BUILT_IN_FMIN):
6737 CASE_FLT_FN (BUILT_IN_FMAX):
6738 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6739 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6752 /* FNDECL is assumed to be a builtin where truncation can be propagated
6753 across (for instance floor((double)f) == (double)floorf (f).
6754 Do the transformation for a call with argument ARG. */
6757 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6759 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6761 if (!validate_arg (arg, REAL_TYPE))
6764 /* Integer rounding functions are idempotent. */
6765 if (fcode == builtin_mathfn_code (arg))
6768 /* If argument is already integer valued, and we don't need to worry
6769 about setting errno, there's no need to perform rounding. */
6770 if (! flag_errno_math && integer_valued_real_p (arg))
6775 tree arg0 = strip_float_extensions (arg);
6776 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6777 tree newtype = TREE_TYPE (arg0);
/* If the argument was widened from a narrower FP type and the same
   builtin exists for that narrower type, call the narrow variant and
   widen its result (the floor((double)f) -> (double)floorf(f) case).  */
6780 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6781 && (decl = mathfn_built_in (newtype, fcode)))
6782 return fold_convert_loc (loc, ftype,
6783 build_call_expr_loc (loc, decl, 1,
6784 fold_convert_loc (loc,
6791 /* FNDECL is assumed to be builtin which can narrow the FP type of
6792 the argument, for instance lround((double)f) -> lroundf (f).
6793 Do the transformation for a call with argument ARG. */
6796 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6798 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6800 if (!validate_arg (arg, REAL_TYPE))
6803 /* If argument is already integer valued, and we don't need to worry
6804 about setting errno, there's no need to perform rounding. */
6805 if (! flag_errno_math && integer_valued_real_p (arg))
6806 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6807 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6811 tree ftype = TREE_TYPE (arg);
6812 tree arg0 = strip_float_extensions (arg);
6813 tree newtype = TREE_TYPE (arg0);
/* Narrow the FP argument type when an equivalent builtin exists for
   the narrower type (lround((double)f) -> lroundf(f)).  */
6816 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6817 && (decl = mathfn_built_in (newtype, fcode)))
6818 return build_call_expr_loc (loc, decl, 1,
6819 fold_convert_loc (loc, newtype, arg0));
6822 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6823 sizeof (long long) == sizeof (long). */
6824 if (TYPE_PRECISION (long_long_integer_type_node)
6825 == TYPE_PRECISION (long_integer_type_node))
6827 tree newfn = NULL_TREE;
/* NOTE(review): the switch header and break statements are elided in
   this listing; each case maps an ll* builtin to its l* sibling.  */
6830 CASE_FLT_FN (BUILT_IN_LLCEIL):
6831 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6834 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6835 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6838 CASE_FLT_FN (BUILT_IN_LLROUND):
6839 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6842 CASE_FLT_FN (BUILT_IN_LLRINT):
6843 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6852 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
/* Convert the long result back to the caller's long long return type.  */
6853 return fold_convert_loc (loc,
6854 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6861 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6862 return type. Return NULL_TREE if no simplification can be made. */
6865 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6869 if (!validate_arg (arg, COMPLEX_TYPE)
6870 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6873 /* Calculate the result when the argument is a constant. */
6874 if (TREE_CODE (arg) == COMPLEX_CST
6875 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6879 if (TREE_CODE (arg) == COMPLEX_EXPR)
6881 tree real = TREE_OPERAND (arg, 0);
6882 tree imag = TREE_OPERAND (arg, 1);
6884 /* If either part is zero, cabs is fabs of the other. */
6885 if (real_zerop (real))
6886 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6887 if (real_zerop (imag))
6888 return fold_build1_loc (loc, ABS_EXPR, type, real);
6890 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6891 if (flag_unsafe_math_optimizations
6892 && operand_equal_p (real, imag, OEP_PURE_SAME))
6894 const REAL_VALUE_TYPE sqrt2_trunc
6895 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6897 return fold_build2_loc (loc, MULT_EXPR, type,
6898 fold_build1_loc (loc, ABS_EXPR, type, real),
6899 build_real (type, sqrt2_trunc));
6903 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
6904 if (TREE_CODE (arg) == NEGATE_EXPR
6905 || TREE_CODE (arg) == CONJ_EXPR)
6906 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6908 /* Don't do this when optimizing for size. */
6909 if (flag_unsafe_math_optimizations
6910 && optimize && optimize_function_for_speed_p (cfun))
6912 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6914 if (sqrtfn != NULL_TREE)
6916 tree rpart, ipart, result;
/* save_exprs prevent ARG and its parts from being evaluated twice.  */
6918 arg = builtin_save_expr (arg);
6920 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6921 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6923 rpart = builtin_save_expr (rpart);
6924 ipart = builtin_save_expr (ipart);
/* Presumably builds rpart*rpart + ipart*ipart (the inner MULT operand
   lines are elided in this listing) and feeds it to sqrt.  */
6926 result = fold_build2_loc (loc, PLUS_EXPR, type,
6927 fold_build2_loc (loc, MULT_EXPR, type,
6929 fold_build2_loc (loc, MULT_EXPR, type,
6932 return build_call_expr_loc (loc, sqrtfn, 1, result);
6939 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
6940 Return NULL_TREE if no simplification can be made. */
6943 fold_builtin_sqrt (location_t loc, tree arg, tree type)
6946 enum built_in_function fcode;
6949 if (!validate_arg (arg, REAL_TYPE))
6952 /* Calculate the result when the argument is a constant. */
6953 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
6956 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
6957 fcode = builtin_mathfn_code (arg);
6958 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6960 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6961 arg = fold_build2_loc (loc, MULT_EXPR, type,
6962 CALL_EXPR_ARG (arg, 0),
6963 build_real (type, dconsthalf));
6964 return build_call_expr_loc (loc, expfn, 1, arg);
6967 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
6968 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6970 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6974 tree arg0 = CALL_EXPR_ARG (arg, 0);
6976 /* The inner root was either sqrt or cbrt. */
6977 /* This was a conditional expression but it triggered a bug
6979 REAL_VALUE_TYPE dconstroot;
6980 if (BUILTIN_SQRT_P (fcode))
6981 dconstroot = dconsthalf;
6983 dconstroot = dconst_third ();
6985 /* Adjust for the outer root. */
/* Halving the exponent: REAL_EXP - 1 divides the value by two,
   turning 1/2 -> 1/4 and 1/3 -> 1/6.  */
6986 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6987 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6988 tree_root = build_real (type, dconstroot);
6989 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6993 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
6994 if (flag_unsafe_math_optimizations
6995 && (fcode == BUILT_IN_POW
6996 || fcode == BUILT_IN_POWF
6997 || fcode == BUILT_IN_POWL))
6999 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7000 tree arg0 = CALL_EXPR_ARG (arg, 0);
7001 tree arg1 = CALL_EXPR_ARG (arg, 1);
/* |x| is only needed when x may be negative.  */
7003 if (!tree_expr_nonnegative_p (arg0))
7004 arg0 = build1 (ABS_EXPR, type, arg0);
7005 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7006 build_real (type, dconsthalf));
7007 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7013 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7014 Return NULL_TREE if no simplification can be made. */
7017 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7019 const enum built_in_function fcode = builtin_mathfn_code (arg);
7022 if (!validate_arg (arg, REAL_TYPE))
7025 /* Calculate the result when the argument is a constant. */
7026 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7029 if (flag_unsafe_math_optimizations)
7031 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7032 if (BUILTIN_EXPONENT_P (fcode))
7034 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7035 const REAL_VALUE_TYPE third_trunc =
7036 real_value_truncate (TYPE_MODE (type), dconst_third ());
7037 arg = fold_build2_loc (loc, MULT_EXPR, type,
7038 CALL_EXPR_ARG (arg, 0),
7039 build_real (type, third_trunc));
7040 return build_call_expr_loc (loc, expfn, 1, arg);
7043 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7044 if (BUILTIN_SQRT_P (fcode))
7046 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7050 tree arg0 = CALL_EXPR_ARG (arg, 0);
7052 REAL_VALUE_TYPE dconstroot = dconst_third ();
/* Halve 1/3 to get the 1/6 exponent.  */
7054 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7055 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7056 tree_root = build_real (type, dconstroot);
7057 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7061 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7062 if (BUILTIN_CBRT_P (fcode))
7064 tree arg0 = CALL_EXPR_ARG (arg, 0);
7065 if (tree_expr_nonnegative_p (arg0))
7067 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7072 REAL_VALUE_TYPE dconstroot;
/* 1/9 is computed as (1/3) * (1/3).  */
7074 real_arithmetic (&dconstroot, MULT_EXPR,
7075 dconst_third_ptr (), dconst_third_ptr ());
7076 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7077 tree_root = build_real (type, dconstroot);
7078 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7083 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7084 if (fcode == BUILT_IN_POW
7085 || fcode == BUILT_IN_POWF
7086 || fcode == BUILT_IN_POWL)
7088 tree arg00 = CALL_EXPR_ARG (arg, 0);
7089 tree arg01 = CALL_EXPR_ARG (arg, 1);
7090 if (tree_expr_nonnegative_p (arg00))
7092 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7093 const REAL_VALUE_TYPE dconstroot
7094 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7095 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7096 build_real (type, dconstroot));
7097 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7104 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7105 TYPE is the type of the return value. Return NULL_TREE if no
7106 simplification can be made. */
7109 fold_builtin_cos (location_t loc,
7110 tree arg, tree type, tree fndecl)
7114 if (!validate_arg (arg, REAL_TYPE))
7117 /* Calculate the result when the argument is a constant. */
7118 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7121 /* Optimize cos(-x) into cos (x). */
/* cos is even, so sign operations on the argument can be stripped.  */
7122 if ((narg = fold_strip_sign_ops (arg)))
7123 return build_call_expr_loc (loc, fndecl, 1, narg);
7128 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7129 Return NULL_TREE if no simplification can be made. */
7132 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7134 if (validate_arg (arg, REAL_TYPE))
7138 /* Calculate the result when the argument is a constant. */
7139 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7142 /* Optimize cosh(-x) into cosh (x). */
/* cosh is even, like cos; drop sign operations on the argument.  */
7143 if ((narg = fold_strip_sign_ops (arg)))
7144 return build_call_expr_loc (loc, fndecl, 1, narg);
7150 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7151 argument ARG. TYPE is the type of the return value. Return
7152 NULL_TREE if no simplification can be made. */
7155 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7158 if (validate_arg (arg, COMPLEX_TYPE)
7159 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7163 /* Calculate the result when the argument is a constant. */
/* HYPER selects the MPC evaluator: cosh for ccosh, cos for ccos.  */
7164 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7167 /* Optimize fn(-x) into fn(x). */
7168 if ((tmp = fold_strip_sign_ops (arg)))
7169 return build_call_expr_loc (loc, fndecl, 1, tmp);
7175 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7176 Return NULL_TREE if no simplification can be made. */
7179 fold_builtin_tan (tree arg, tree type)
7181 enum built_in_function fcode;
7184 if (!validate_arg (arg, REAL_TYPE))
7187 /* Calculate the result when the argument is a constant. */
7188 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7191 /* Optimize tan(atan(x)) = x. */
/* Only valid under -funsafe-math-optimizations: atan's range is
   (-pi/2, pi/2) but the composition can differ in rounding.  */
7192 fcode = builtin_mathfn_code (arg);
7193 if (flag_unsafe_math_optimizations
7194 && (fcode == BUILT_IN_ATAN
7195 || fcode == BUILT_IN_ATANF
7196 || fcode == BUILT_IN_ATANL))
7197 return CALL_EXPR_ARG (arg, 0);
7202 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7203 NULL_TREE if no simplification can be made. */
7206 fold_builtin_sincos (location_t loc,
7207 tree arg0, tree arg1, tree arg2)
7212 if (!validate_arg (arg0, REAL_TYPE)
7213 || !validate_arg (arg1, POINTER_TYPE)
7214 || !validate_arg (arg2, POINTER_TYPE))
7217 type = TREE_TYPE (arg0);
7219 /* Calculate the result when the argument is a constant. */
7220 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7223 /* Canonicalize sincos to cexpi. */
/* NOTE(review): the body of this guard is elided; presumably it bails
   out when C99 functions (hence cexpi) are unavailable -- confirm.  */
7224 if (!TARGET_C99_FUNCTIONS)
7226 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7230 call = build_call_expr_loc (loc, fn, 1, arg0);
7231 call = builtin_save_expr (call);
/* Store imagpart(cexpi(x)) into *ARG1 (sin) and realpart into *ARG2
   (cos), sequenced with a COMPOUND_EXPR.  */
7233 return build2 (COMPOUND_EXPR, void_type_node,
7234 build2 (MODIFY_EXPR, void_type_node,
7235 build_fold_indirect_ref_loc (loc, arg1),
7236 build1 (IMAGPART_EXPR, type, call)),
7237 build2 (MODIFY_EXPR, void_type_node,
7238 build_fold_indirect_ref_loc (loc, arg2),
7239 build1 (REALPART_EXPR, type, call)));
7242 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7243 NULL_TREE if no simplification can be made. */
7246 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7249 tree realp, imagp, ifn;
7252 if (!validate_arg (arg0, COMPLEX_TYPE)
7253 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7256 /* Calculate the result when the argument is a constant. */
7257 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7260 rtype = TREE_TYPE (TREE_TYPE (arg0));
7262 /* In case we can figure out the real part of arg0 and it is constant zero
7264 if (!TARGET_C99_FUNCTIONS)
7266 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
/* cexp(0 + yi) is exactly cexpi(y).  */
7270 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7271 && real_zerop (realp))
7273 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7274 return build_call_expr_loc (loc, ifn, 1, narg);
7277 /* In case we can easily decompose real and imaginary parts split cexp
7278 to exp (r) * cexpi (i). */
7279 if (flag_unsafe_math_optimizations
7282 tree rfn, rcall, icall;
7284 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7288 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
/* save_exprs keep exp(r) and cexpi(i) from being evaluated twice
   when combined below.  */
7292 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7293 icall = builtin_save_expr (icall);
7294 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7295 rcall = builtin_save_expr (rcall);
/* Rebuild the complex result as exp(r)*Re(cexpi(i)) +
   exp(r)*Im(cexpi(i)) i (inner operand lines elided here).  */
7296 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7297 fold_build2_loc (loc, MULT_EXPR, rtype,
7299 fold_build1_loc (loc, REALPART_EXPR,
7301 fold_build2_loc (loc, MULT_EXPR, rtype,
7303 fold_build1_loc (loc, IMAGPART_EXPR,
7310 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7311 Return NULL_TREE if no simplification can be made. */
7314 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7316 if (!validate_arg (arg, REAL_TYPE))
7319 /* Optimize trunc of constant value. */
7320 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7322 REAL_VALUE_TYPE r, x;
7323 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7325 x = TREE_REAL_CST (arg);
7326 real_trunc (&r, TYPE_MODE (type), &x);
7327 return build_real (type, r);
/* Otherwise fall back to the generic narrowing transformation.  */
7330 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7333 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7334 Return NULL_TREE if no simplification can be made. */
7337 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7339 if (!validate_arg (arg, REAL_TYPE))
7342 /* Optimize floor of constant value. */
7343 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7347 x = TREE_REAL_CST (arg);
/* Folding floor(NaN) is only done when errno semantics don't matter.  */
7348 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7350 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7353 real_floor (&r, TYPE_MODE (type), &x);
7354 return build_real (type, r);
7358 /* Fold floor (x) where x is nonnegative to trunc (x). */
7359 if (tree_expr_nonnegative_p (arg))
7361 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7363 return build_call_expr_loc (loc, truncfn, 1, arg);
7366 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7369 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7370 Return NULL_TREE if no simplification can be made. */
7373 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7375 if (!validate_arg (arg, REAL_TYPE))
7378 /* Optimize ceil of constant value. */
7379 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7383 x = TREE_REAL_CST (arg);
/* Same NaN/errno caveat as floor: only fold NaN when errno is off.  */
7384 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7386 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7389 real_ceil (&r, TYPE_MODE (type), &x);
7390 return build_real (type, r);
7394 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7397 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7398 Return NULL_TREE if no simplification can be made. */
7401 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7403 if (!validate_arg (arg, REAL_TYPE))
7406 /* Optimize round of constant value. */
7407 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7411 x = TREE_REAL_CST (arg);
/* Same NaN/errno caveat as floor and ceil.  */
7412 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7414 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7417 real_round (&r, TYPE_MODE (type), &x);
7418 return build_real (type, r);
7422 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7425 /* Fold function call to builtin lround, lroundf or lroundl (or the
7426 corresponding long long versions) and other rounding functions. ARG
7427 is the argument to the call. Return NULL_TREE if no simplification
7431 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7433 if (!validate_arg (arg, REAL_TYPE))
7436 /* Optimize lround of constant value. */
7437 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7439 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
/* Inf/NaN cannot be represented in the integer result type.  */
7441 if (real_isfinite (&x))
7443 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7444 tree ftype = TREE_TYPE (arg);
7445 unsigned HOST_WIDE_INT lo2;
7446 HOST_WIDE_INT hi, lo;
/* Round per the specific builtin, then convert to the integer type.  */
7449 switch (DECL_FUNCTION_CODE (fndecl))
7451 CASE_FLT_FN (BUILT_IN_LFLOOR):
7452 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7453 real_floor (&r, TYPE_MODE (ftype), &x);
7456 CASE_FLT_FN (BUILT_IN_LCEIL):
7457 CASE_FLT_FN (BUILT_IN_LLCEIL):
7458 real_ceil (&r, TYPE_MODE (ftype), &x);
7461 CASE_FLT_FN (BUILT_IN_LROUND):
7462 CASE_FLT_FN (BUILT_IN_LLROUND):
7463 real_round (&r, TYPE_MODE (ftype), &x);
/* NOTE(review): fit_double_type's return convention is not visible
   here; presumably a zero (fits) result enables the fold -- confirm.  */
7470 REAL_VALUE_TO_INT (&lo, &hi, r);
7471 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7472 return build_int_cst_wide (itype, lo2, hi);
7476 switch (DECL_FUNCTION_CODE (fndecl))
7478 CASE_FLT_FN (BUILT_IN_LFLOOR):
7479 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7480 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7481 if (tree_expr_nonnegative_p (arg))
7482 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7483 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7488 return fold_fixed_mathfn (loc, fndecl, arg);
7491 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7492 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7493 the argument to the call. Return NULL_TREE if no simplification can
7497 fold_builtin_bitop (tree fndecl, tree arg)
7499 if (!validate_arg (arg, INTEGER_TYPE))
7502 /* Optimize for constant argument. */
7503 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7505 HOST_WIDE_INT hi, width, result;
7506 unsigned HOST_WIDE_INT lo;
7509 type = TREE_TYPE (arg);
7510 width = TYPE_PRECISION (type);
7511 lo = TREE_INT_CST_LOW (arg);
7513 /* Clear all the bits that are beyond the type's precision. */
7514 if (width > HOST_BITS_PER_WIDE_INT)
7516 hi = TREE_INT_CST_HIGH (arg);
7517 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7518 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7523 if (width < HOST_BITS_PER_WIDE_INT)
7524 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
/* The constant is held as a LO/HI pair of HOST_WIDE_INTs; each case
   below handles the halves separately.  */
7527 switch (DECL_FUNCTION_CODE (fndecl))
7529 CASE_INT_FN (BUILT_IN_FFS):
/* (lo & -lo) isolates the lowest set bit, so exact_log2 of it gives
   the index of the first set bit.  */
7531 result = exact_log2 (lo & -lo) + 1;
7533 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7538 CASE_INT_FN (BUILT_IN_CLZ):
7540 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7542 result = width - floor_log2 (lo) - 1;
/* clz(0)/ctz(0) are only folded when the target defines a value.  */
7543 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7547 CASE_INT_FN (BUILT_IN_CTZ):
7549 result = exact_log2 (lo & -lo);
7551 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7552 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7556 CASE_INT_FN (BUILT_IN_POPCOUNT):
/* Kernighan's trick: x &= x - 1 clears one set bit per iteration.  */
7559 result++, lo &= lo - 1;
7561 result++, hi &= hi - 1;
7564 CASE_INT_FN (BUILT_IN_PARITY):
7567 result++, lo &= lo - 1;
7569 result++, hi &= hi - 1;
7577 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7583 /* Fold function call to builtin_bswap and the long and long long
7584 variants. Return NULL_TREE if no simplification can be made. */
7586 fold_builtin_bswap (tree fndecl, tree arg)
7588 if (! validate_arg (arg, INTEGER_TYPE))
7591 /* Optimize constant value. */
7592 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7594 HOST_WIDE_INT hi, width, r_hi = 0;
7595 unsigned HOST_WIDE_INT lo, r_lo = 0;
7598 type = TREE_TYPE (arg);
7599 width = TYPE_PRECISION (type);
7600 lo = TREE_INT_CST_LOW (arg);
7601 hi = TREE_INT_CST_HIGH (arg);
7603 switch (DECL_FUNCTION_CODE (fndecl))
7605 case BUILT_IN_BSWAP32:
7606 case BUILT_IN_BSWAP64:
/* Byte-reverse: the byte at bit offset S moves to the mirrored
   offset D, crossing between the LO and HI halves as needed.  */
7610 for (s = 0; s < width; s += 8)
7612 int d = width - s - 8;
7613 unsigned HOST_WIDE_INT byte;
7615 if (s < HOST_BITS_PER_WIDE_INT)
7616 byte = (lo >> s) & 0xff;
7618 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7620 if (d < HOST_BITS_PER_WIDE_INT)
7623 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
/* A narrow result fits entirely in R_LO; otherwise build a wide
   integer constant from both halves.  */
7633 if (width < HOST_BITS_PER_WIDE_INT)
7634 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7636 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7642 /* A subroutine of fold_builtin to fold the various logarithmic
7643 functions. Return NULL_TREE if no simplification can me made.
7644 FUNC is the corresponding MPFR logarithm function. */
7647 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7648 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7650 if (validate_arg (arg, REAL_TYPE))
7652 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7654 const enum built_in_function fcode = builtin_mathfn_code (arg);
7656 /* Calculate the result when the argument is a constant. */
7657 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7660 /* Special case, optimize logN(expN(x)) = x. */
/* Matching is by MPFR function pointer: each log base cancels only
   its own exponential.  */
7661 if (flag_unsafe_math_optimizations
7662 && ((func == mpfr_log
7663 && (fcode == BUILT_IN_EXP
7664 || fcode == BUILT_IN_EXPF
7665 || fcode == BUILT_IN_EXPL))
7666 || (func == mpfr_log2
7667 && (fcode == BUILT_IN_EXP2
7668 || fcode == BUILT_IN_EXP2F
7669 || fcode == BUILT_IN_EXP2L))
7670 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7671 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7673 /* Optimize logN(func()) for various exponential functions. We
7674 want to determine the value "x" and the power "exponent" in
7675 order to transform logN(x**exponent) into exponent*logN(x). */
7676 if (flag_unsafe_math_optimizations)
7678 tree exponent = 0, x = 0;
7682 CASE_FLT_FN (BUILT_IN_EXP):
7683 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7684 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7686 exponent = CALL_EXPR_ARG (arg, 0);
7688 CASE_FLT_FN (BUILT_IN_EXP2):
7689 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7690 x = build_real (type, dconst2);
7691 exponent = CALL_EXPR_ARG (arg, 0);
7693 CASE_FLT_FN (BUILT_IN_EXP10):
7694 CASE_FLT_FN (BUILT_IN_POW10):
7695 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7697 REAL_VALUE_TYPE dconst10;
7698 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7699 x = build_real (type, dconst10);
7701 exponent = CALL_EXPR_ARG (arg, 0);
7703 CASE_FLT_FN (BUILT_IN_SQRT):
7704 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7705 x = CALL_EXPR_ARG (arg, 0);
7706 exponent = build_real (type, dconsthalf);
7708 CASE_FLT_FN (BUILT_IN_CBRT):
7709 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7710 x = CALL_EXPR_ARG (arg, 0);
7711 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7714 CASE_FLT_FN (BUILT_IN_POW):
7715 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7716 x = CALL_EXPR_ARG (arg, 0);
7717 exponent = CALL_EXPR_ARG (arg, 1);
7723 /* Now perform the optimization. */
7726 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7727 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7735 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7736 NULL_TREE if no simplification can be made. */
7739 fold_builtin_hypot (location_t loc, tree fndecl,
7740 tree arg0, tree arg1, tree type)
7742 tree res, narg0, narg1;
7744 if (!validate_arg (arg0, REAL_TYPE)
7745 || !validate_arg (arg1, REAL_TYPE))
7748 /* Calculate the result when the argument is a constant. */
7749 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7752 /* If either argument to hypot has a negate or abs, strip that off.
7753 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
/* hypot depends only on magnitudes, so sign ops on either arg are
   irrelevant; NULL from fold_strip_sign_ops means "unchanged".  */
7754 narg0 = fold_strip_sign_ops (arg0);
7755 narg1 = fold_strip_sign_ops (arg1);
7758 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7759 narg1 ? narg1 : arg1);
7762 /* If either argument is zero, hypot is fabs of the other. */
7763 if (real_zerop (arg0))
7764 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7765 else if (real_zerop (arg1))
7766 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7768 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7769 if (flag_unsafe_math_optimizations
7770 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7772 const REAL_VALUE_TYPE sqrt2_trunc
7773 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7774 return fold_build2_loc (loc, MULT_EXPR, type,
7775 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7776 build_real (type, sqrt2_trunc));
7783 /* Fold a builtin function call to pow, powf, or powl. Return
7784 NULL_TREE if no simplification can be made. */
7786 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7790 if (!validate_arg (arg0, REAL_TYPE)
7791 || !validate_arg (arg1, REAL_TYPE))
7794 /* Calculate the result when the argument is a constant. */
7795 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7798 /* Optimize pow(1.0,y) = 1.0. */
/* omit_one_operand keeps ARG1 for its side effects while producing 1.0.  */
7799 if (real_onep (arg0))
7800 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7802 if (TREE_CODE (arg1) == REAL_CST
7803 && !TREE_OVERFLOW (arg1))
7805 REAL_VALUE_TYPE cint;
7809 c = TREE_REAL_CST (arg1);
7811 /* Optimize pow(x,0.0) = 1.0. */
7812 if (REAL_VALUES_EQUAL (c, dconst0))
7813 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7816 /* Optimize pow(x,1.0) = x. */
7817 if (REAL_VALUES_EQUAL (c, dconst1))
7820 /* Optimize pow(x,-1.0) = 1.0/x. */
7821 if (REAL_VALUES_EQUAL (c, dconstm1))
7822 return fold_build2_loc (loc, RDIV_EXPR, type,
7823 build_real (type, dconst1), arg0);
7825 /* Optimize pow(x,0.5) = sqrt(x). */
7826 if (flag_unsafe_math_optimizations
7827 && REAL_VALUES_EQUAL (c, dconsthalf))
7829 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7831 if (sqrtfn != NULL_TREE)
7832 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7835 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7836 if (flag_unsafe_math_optimizations)
7838 const REAL_VALUE_TYPE dconstroot
7839 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7841 if (REAL_VALUES_EQUAL (c, dconstroot))
7843 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7844 if (cbrtfn != NULL_TREE)
7845 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7849 /* Check for an integer exponent. */
/* Round-trip through an integer and compare; identical means the
   exponent C is an exact integer N.  */
7850 n = real_to_integer (&c);
7851 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7852 if (real_identical (&c, &cint))
7854 /* Attempt to evaluate pow at compile-time, unless this should
7855 raise an exception. */
7856 if (TREE_CODE (arg0) == REAL_CST
7857 && !TREE_OVERFLOW (arg0)
7859 || (!flag_trapping_math && !flag_errno_math)
7860 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7865 x = TREE_REAL_CST (arg0);
/* real_powi reports whether the result is inexact; an inexact fold
   is only accepted under -funsafe-math-optimizations.  */
7866 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7867 if (flag_unsafe_math_optimizations || !inexact)
7868 return build_real (type, x);
7871 /* Strip sign ops from even integer powers. */
7872 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7874 tree narg0 = fold_strip_sign_ops (arg0);
7876 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
7881 if (flag_unsafe_math_optimizations)
7883 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7885 /* Optimize pow(expN(x),y) = expN(x*y). */
7886 if (BUILTIN_EXPONENT_P (fcode))
7888 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7889 tree arg = CALL_EXPR_ARG (arg0, 0);
7890 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7891 return build_call_expr_loc (loc, expfn, 1, arg);
7894 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7895 if (BUILTIN_SQRT_P (fcode))
7897 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7898 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7899 build_real (type, dconsthalf));
7900 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7903 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7904 if (BUILTIN_CBRT_P (fcode))
7906 tree arg = CALL_EXPR_ARG (arg0, 0);
7907 if (tree_expr_nonnegative_p (arg))
7909 const REAL_VALUE_TYPE dconstroot
7910 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7911 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7912 build_real (type, dconstroot));
7913 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7917 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7918 if (fcode == BUILT_IN_POW
7919 || fcode == BUILT_IN_POWF
7920 || fcode == BUILT_IN_POWL)
7922 tree arg00 = CALL_EXPR_ARG (arg0, 0);
7923 if (tree_expr_nonnegative_p (arg00))
7925 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7926 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7927 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7935 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
7936 Return NULL_TREE if no simplification can be made. */
7938 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7939 tree arg0, tree arg1, tree type)
7941 if (!validate_arg (arg0, REAL_TYPE)
7942 || !validate_arg (arg1, INTEGER_TYPE))
7945 /* Optimize pow(1.0,y) = 1.0. */
7946 if (real_onep (arg0))
7947 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
/* Only fold when the integer exponent fits in a HOST_WIDE_INT.  */
7949 if (host_integerp (arg1, 0))
7951 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7953 /* Evaluate powi at compile-time. */
7954 if (TREE_CODE (arg0) == REAL_CST
7955 && !TREE_OVERFLOW (arg0))
7958 x = TREE_REAL_CST (arg0);
7959 real_powi (&x, TYPE_MODE (type), &x, c);
7960 return build_real (type, x);
7963 /* Optimize pow(x,0) = 1.0. */
7965 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7968 /* Optimize pow(x,1) = x. */
7972 /* Optimize pow(x,-1) = 1.0/x. */
7974 return fold_build2_loc (loc, RDIV_EXPR, type,
7975 build_real (type, dconst1), arg0);
7981 /* A subroutine of fold_builtin to fold the various exponent
7982 functions. Return NULL_TREE if no simplification can be made.
7983 FUNC is the corresponding MPFR exponent function. */
7986 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
7987 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7989 if (validate_arg (arg, REAL_TYPE))
/* The result type is the return type of the builtin being folded.  */
7991 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7994 /* Calculate the result when the argument is a constant. */
7995 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
7998 /* Optimize expN(logN(x)) = x. */
7999 if (flag_unsafe_math_optimizations)
8001 const enum built_in_function fcode = builtin_mathfn_code (arg);
/* Match FUNC against the corresponding log family so only
   same-base exp/log pairs cancel (exp/log, exp2/log2, exp10/log10).  */
8003 if ((func == mpfr_exp
8004 && (fcode == BUILT_IN_LOG
8005 || fcode == BUILT_IN_LOGF
8006 || fcode == BUILT_IN_LOGL))
8007 || (func == mpfr_exp2
8008 && (fcode == BUILT_IN_LOG2
8009 || fcode == BUILT_IN_LOG2F
8010 || fcode == BUILT_IN_LOG2L))
8011 || (func == mpfr_exp10
8012 && (fcode == BUILT_IN_LOG10
8013 || fcode == BUILT_IN_LOG10F
8014 || fcode == BUILT_IN_LOG10L)))
8015 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8022 /* Return true if VAR is a VAR_DECL or a component thereof. */
8025 var_decl_component_p (tree var)
/* Strip component references (COMPONENT_REF, ARRAY_REF, ...) down
   to the underlying base object before testing it.  */
8028 while (handled_component_p (inner))
8029 inner = TREE_OPERAND (inner, 0);
8030 return SSA_VAR_P (inner);
8033 /* Fold function call to builtin memset. Return
8034 NULL_TREE if no simplification can be made. */
8037 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8038 tree type, bool ignore)
8040 tree var, ret, etype;
8041 unsigned HOST_WIDE_INT length, cval;
/* Argument sanity: DEST a pointer, C and LEN integers.  */
8043 if (! validate_arg (dest, POINTER_TYPE)
8044 || ! validate_arg (c, INTEGER_TYPE)
8045 || ! validate_arg (len, INTEGER_TYPE))
8048 if (! host_integerp (len, 1))
8051 /* If the LEN parameter is zero, return DEST. */
8052 if (integer_zerop (len))
8053 return omit_one_operand_loc (loc, type, dest, c)
8055 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8060 if (TREE_CODE (var) != ADDR_EXPR)
8063 var = TREE_OPERAND (var, 0);
8064 if (TREE_THIS_VOLATILE (var))
/* Peel one array level so a single-element store can target the
   element type directly.  */
8067 etype = TREE_TYPE (var);
8068 if (TREE_CODE (etype) == ARRAY_TYPE)
8069 etype = TREE_TYPE (etype);
8071 if (!INTEGRAL_TYPE_P (etype)
8072 && !POINTER_TYPE_P (etype))
8075 if (! var_decl_component_p (var))
/* Only fold when LEN exactly covers one object of ETYPE and the
   destination is sufficiently aligned.  */
8078 length = tree_low_cst (len, 1);
8079 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8080 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8084 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8087 if (integer_zerop (c))
8091 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
/* Replicate the byte value C across the word; the two-step shift
   avoids shifting by the full width of the type (undefined).  */
8094 cval = tree_low_cst (c, 1);
8098 cval |= (cval << 31) << 1;
8101 ret = build_int_cst_type (etype, cval);
8102 var = build_fold_indirect_ref_loc (loc,
8103 fold_convert_loc (loc,
8104 build_pointer_type (etype),
/* Emit a single scalar store *(etype *)dest = cval in place of the call.  */
8106 ret = build2 (MODIFY_EXPR, etype, var, ret);
8110 return omit_one_operand_loc (loc, type, dest, ret);
8113 /* Fold function call to builtin bzero. Return
8114 NULL_TREE if no simplification can be made. */
8117 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8119 if (! validate_arg (dest, POINTER_TYPE)
8120 || ! validate_arg (size, INTEGER_TYPE))
8126 /* New argument list transforming bzero(ptr x, int y) to
8127 memset(ptr x, int 0, size_t y). This is done this way
8128 so that if it isn't expanded inline, we fallback to
8129 calling bzero instead of memset. */
8131 return fold_builtin_memset (loc, dest, integer_zero_node,
8132 fold_convert_loc (loc, sizetype, size),
8133 void_type_node, ignore);
8136 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8137 NULL_TREE if no simplification can be made.
8138 If ENDP is 0, return DEST (like memcpy).
8139 If ENDP is 1, return DEST+LEN (like mempcpy).
8140 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8141 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8145 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8146 tree len, tree type, bool ignore, int endp)
8148 tree destvar, srcvar, expr;
8150 if (! validate_arg (dest, POINTER_TYPE)
8151 || ! validate_arg (src, POINTER_TYPE)
8152 || ! validate_arg (len, INTEGER_TYPE))
8155 /* If the LEN parameter is zero, return DEST. */
8156 if (integer_zerop (len))
8157 return omit_one_operand_loc (loc, type, dest, src);
8159 /* If SRC and DEST are the same (and not volatile), return
8160 DEST{,+LEN,+LEN-1}. */
8161 if (operand_equal_p (src, dest, 0))
8165 tree srctype, desttype;
8166 int src_align, dest_align;
8170 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8171 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8173 /* Both DEST and SRC must be pointer types.
8174 ??? This is what old code did. Is the testing for pointer types
8177 If either SRC is readonly or length is 1, we can use memcpy. */
8178 if (!dest_align || !src_align)
8180 if (readonly_data_expr (src)
8181 || (host_integerp (len, 1)
8182 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8183 >= tree_low_cst (len, 1))))
8185 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8188 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8191 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8192 srcvar = build_fold_indirect_ref_loc (loc, src);
8193 destvar = build_fold_indirect_ref_loc (loc, dest);
8195 && !TREE_THIS_VOLATILE (srcvar)
8197 && !TREE_THIS_VOLATILE (destvar))
8199 tree src_base, dest_base, fn;
8200 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8201 HOST_WIDE_INT size = -1;
8202 HOST_WIDE_INT maxsize = -1;
/* Resolve both accesses to base object + bit offset so overlap can
   be decided conservatively below.  */
8205 if (handled_component_p (src_base))
8206 src_base = get_ref_base_and_extent (src_base, &src_offset,
8208 dest_base = destvar;
8209 if (handled_component_p (dest_base))
8210 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8212 if (host_integerp (len, 1))
8214 maxsize = tree_low_cst (len, 1);
/* Guard against overflow when converting LEN from bytes to bits.  */
8216 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8219 maxsize *= BITS_PER_UNIT;
8223 if (SSA_VAR_P (src_base)
8224 && SSA_VAR_P (dest_base))
8226 if (operand_equal_p (src_base, dest_base, 0)
8227 && ranges_overlap_p (src_offset, maxsize,
8228 dest_offset, maxsize))
8231 else if (TREE_CODE (src_base) == INDIRECT_REF
8232 && TREE_CODE (dest_base) == INDIRECT_REF)
8234 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8235 TREE_OPERAND (dest_base, 0), 0)
8236 || ranges_overlap_p (src_offset, maxsize,
8237 dest_offset, maxsize))
/* Proven non-overlapping: memmove degrades to memcpy.  */
8243 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8246 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8251 if (!host_integerp (len, 0))
8254 This logic lose for arguments like (type *)malloc (sizeof (type)),
8255 since we strip the casts of up to VOID return value from malloc.
8256 Perhaps we ought to inherit type from non-VOID argument here? */
8259 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8260 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8262 tree tem = TREE_OPERAND (src, 0);
8264 if (tem != TREE_OPERAND (src, 0))
8265 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8267 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8269 tree tem = TREE_OPERAND (dest, 0);
8271 if (tem != TREE_OPERAND (dest, 0))
8272 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
/* If the pointee is an array whose size differs from LEN, retarget
   the pointers at the element type instead.  */
8274 srctype = TREE_TYPE (TREE_TYPE (src));
8276 && TREE_CODE (srctype) == ARRAY_TYPE
8277 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8279 srctype = TREE_TYPE (srctype);
8281 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8283 desttype = TREE_TYPE (TREE_TYPE (dest));
8285 && TREE_CODE (desttype) == ARRAY_TYPE
8286 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8288 desttype = TREE_TYPE (desttype);
8290 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
/* Both element types must have constant size and be non-volatile
   for the scalar-assignment fold to be valid.  */
8292 if (!srctype || !desttype
8293 || !TYPE_SIZE_UNIT (srctype)
8294 || !TYPE_SIZE_UNIT (desttype)
8295 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8296 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8297 || TYPE_VOLATILE (srctype)
8298 || TYPE_VOLATILE (desttype))
8301 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8302 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8303 if (dest_align < (int) TYPE_ALIGN (desttype)
8304 || src_align < (int) TYPE_ALIGN (srctype))
8308 dest = builtin_save_expr (dest);
8311 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8313 srcvar = build_fold_indirect_ref_loc (loc, src);
8314 if (TREE_THIS_VOLATILE (srcvar))
8316 else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
8318 /* With memcpy, it is possible to bypass aliasing rules, so without
8319 this check i.e. execute/20060930-2.c would be misoptimized,
8320 because it use conflicting alias set to hold argument for the
8321 memcpy call. This check is probably unnecessary with
8322 -fno-strict-aliasing. Similarly for destvar. See also
8324 else if (!var_decl_component_p (srcvar))
8328 destvar = NULL_TREE;
8329 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8331 destvar = build_fold_indirect_ref_loc (loc, dest);
8332 if (TREE_THIS_VOLATILE (destvar))
8334 else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
8335 destvar = NULL_TREE;
8336 else if (!var_decl_component_p (destvar))
8337 destvar = NULL_TREE;
8340 if (srcvar == NULL_TREE && destvar == NULL_TREE)
/* Only one side qualified: synthesize the missing side's access by
   reusing the other side's type, packing/realigning a variant copy
   when the pointer's known alignment is too small.  */
8343 if (srcvar == NULL_TREE)
8346 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8349 srctype = build_qualified_type (desttype, 0);
8350 if (src_align < (int) TYPE_ALIGN (srctype))
8352 if (AGGREGATE_TYPE_P (srctype)
8353 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8356 srctype = build_variant_type_copy (srctype);
8357 TYPE_ALIGN (srctype) = src_align;
8358 TYPE_USER_ALIGN (srctype) = 1;
8359 TYPE_PACKED (srctype) = 1;
8361 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8362 src = fold_convert_loc (loc, srcptype, src);
8363 srcvar = build_fold_indirect_ref_loc (loc, src);
8365 else if (destvar == NULL_TREE)
8368 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8371 desttype = build_qualified_type (srctype, 0);
8372 if (dest_align < (int) TYPE_ALIGN (desttype))
8374 if (AGGREGATE_TYPE_P (desttype)
8375 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8378 desttype = build_variant_type_copy (desttype);
8379 TYPE_ALIGN (desttype) = dest_align;
8380 TYPE_USER_ALIGN (desttype) = 1;
8381 TYPE_PACKED (desttype) = 1;
8383 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8384 dest = fold_convert_loc (loc, destptype, dest);
8385 destvar = build_fold_indirect_ref_loc (loc, dest);
/* Build the replacement store, converting the source value to the
   destination type (plain convert for scalar/pointer, else a
   VIEW_CONVERT_EXPR reinterpretation).  */
8388 if (srctype == desttype
8389 || (gimple_in_ssa_p (cfun)
8390 && useless_type_conversion_p (desttype, srctype)))
8392 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8393 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8394 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8395 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8396 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
8398 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8399 TREE_TYPE (destvar), srcvar);
8400 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
/* ENDP selects the returned pointer: DEST, DEST+LEN, or DEST+LEN-1.  */
8406 if (endp == 0 || endp == 3)
8407 return omit_one_operand_loc (loc, type, dest, expr);
8413 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8416 len = fold_convert_loc (loc, sizetype, len);
8417 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8418 dest = fold_convert_loc (loc, type, dest);
8420 dest = omit_one_operand_loc (loc, type, dest, expr);
8424 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8425 If LEN is not NULL, it represents the length of the string to be
8426 copied. Return NULL_TREE if no simplification can be made. */
8429 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8433 if (!validate_arg (dest, POINTER_TYPE)
8434 || !validate_arg (src, POINTER_TYPE))
8437 /* If SRC and DEST are the same (and not volatile), return DEST. */
8438 if (operand_equal_p (src, dest, 0))
8439 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
/* Transforming to memcpy can grow code; skip when optimizing for size.  */
8441 if (optimize_function_for_size_p (cfun))
8444 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
/* LEN must be a known, side-effect-free string length to proceed.  */
8450 len = c_strlen (src, 1);
8451 if (! len || TREE_SIDE_EFFECTS (len))
/* Copy LEN + 1 bytes so the NUL terminator is included.  */
8455 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8456 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8457 build_call_expr_loc (loc, fn, 3, dest, src, len));
8460 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8461 Return NULL_TREE if no simplification can be made. */
8464 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8466 tree fn, len, lenp1, call, type;
8468 if (!validate_arg (dest, POINTER_TYPE)
8469 || !validate_arg (src, POINTER_TYPE))
/* Only fold when the source length is a compile-time constant.  */
8472 len = c_strlen (src, 1);
8474 || TREE_CODE (len) != INTEGER_CST)
8477 if (optimize_function_for_size_p (cfun)
8478 /* If length is zero it's small enough. */
8479 && !integer_zerop (len))
8482 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
/* Copy LEN + 1 bytes to include the NUL terminator.  */
8486 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8487 /* We use dest twice in building our expression. Save it from
8488 multiple expansions. */
8489 dest = builtin_save_expr (dest);
8490 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
/* stpcpy returns a pointer to the terminating NUL, i.e. DEST + LEN.  */
8492 type = TREE_TYPE (TREE_TYPE (fndecl));
8493 len = fold_convert_loc (loc, sizetype, len);
8494 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8495 dest = fold_convert_loc (loc, type, dest);
8496 dest = omit_one_operand_loc (loc, type, dest, call);
8500 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8501 If SLEN is not NULL, it represents the length of the source string.
8502 Return NULL_TREE if no simplification can be made. */
8505 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8506 tree src, tree len, tree slen)
8510 if (!validate_arg (dest, POINTER_TYPE)
8511 || !validate_arg (src, POINTER_TYPE)
8512 || !validate_arg (len, INTEGER_TYPE))
8515 /* If the LEN parameter is zero, return DEST. */
8516 if (integer_zerop (len))
8517 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8519 /* We can't compare slen with len as constants below if len is not a
8521 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8525 slen = c_strlen (src, 1);
8527 /* Now, we must be passed a constant src ptr parameter. */
8528 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
/* Account for the NUL terminator when comparing against LEN.  */
8531 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8533 /* We do not support simplification of this case, though we do
8534 support it when expanding trees into RTL. */
8535 /* FIXME: generate a call to __builtin_memset. */
8536 if (tree_int_cst_lt (slen, len))
8539 /* OK transform into builtin memcpy. */
8540 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8543 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8544 build_call_expr_loc (loc, fn, 3, dest, src, len));
8547 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8548 arguments to the call, and TYPE is its return type.
8549 Return NULL_TREE if no simplification can be made. */
8552 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8554 if (!validate_arg (arg1, POINTER_TYPE)
8555 || !validate_arg (arg2, INTEGER_TYPE)
8556 || !validate_arg (len, INTEGER_TYPE))
/* Both the searched-for byte and the length must be constants.  */
8562 if (TREE_CODE (arg2) != INTEGER_CST
8563 || !host_integerp (len, 1))
/* Only fold when LEN stays within the constant string (incl. NUL).  */
8566 p1 = c_getstr (arg1);
8567 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
/* Convert ARG2 to the target character set before searching.  */
8573 if (target_char_cast (arg2, &c))
8576 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8579 return build_int_cst (TREE_TYPE (arg1), 0);
/* Found: return ARG1 advanced by the match offset.  */
8581 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8583 return fold_convert_loc (loc, type, tem);
8589 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8590 Return NULL_TREE if no simplification can be made. */
8593 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8595 const char *p1, *p2;
8597 if (!validate_arg (arg1, POINTER_TYPE)
8598 || !validate_arg (arg2, POINTER_TYPE)
8599 || !validate_arg (len, INTEGER_TYPE))
8602 /* If the LEN parameter is zero, return zero. */
8603 if (integer_zerop (len))
8604 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8607 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8608 if (operand_equal_p (arg1, arg2, 0))
8609 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8611 p1 = c_getstr (arg1);
8612 p2 = c_getstr (arg2);
8614 /* If all arguments are constant, and the value of len is not greater
8615 than the lengths of arg1 and arg2, evaluate at compile-time. */
8616 if (host_integerp (len, 1) && p1 && p2
8617 && compare_tree_int (len, strlen (p1) + 1) <= 0
8618 && compare_tree_int (len, strlen (p2) + 1) <= 0)
/* Normalize the host memcmp result to -1/0/1.  */
8620 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8623 return integer_one_node;
8625 return integer_minus_one_node;
8627 return integer_zero_node;
8630 /* If len parameter is one, return an expression corresponding to
8631 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8632 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8634 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8635 tree cst_uchar_ptr_node
8636 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8639 = fold_convert_loc (loc, integer_type_node,
8640 build1 (INDIRECT_REF, cst_uchar_node,
8641 fold_convert_loc (loc,
8645 = fold_convert_loc (loc, integer_type_node,
8646 build1 (INDIRECT_REF, cst_uchar_node,
8647 fold_convert_loc (loc,
8650 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8656 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8657 Return NULL_TREE if no simplification can be made. */
8660 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8662 const char *p1, *p2;
8664 if (!validate_arg (arg1, POINTER_TYPE)
8665 || !validate_arg (arg2, POINTER_TYPE))
8668 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8669 if (operand_equal_p (arg1, arg2, 0))
8670 return integer_zero_node;
8672 p1 = c_getstr (arg1);
8673 p2 = c_getstr (arg2);
/* Both strings constant: evaluate on the host, normalized to -1/0/1.  */
8677 const int i = strcmp (p1, p2);
8679 return integer_minus_one_node;
8681 return integer_one_node;
8683 return integer_zero_node;
8686 /* If the second arg is "", return *(const unsigned char*)arg1. */
8687 if (p2 && *p2 == '\0')
8689 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8690 tree cst_uchar_ptr_node
8691 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8693 return fold_convert_loc (loc, integer_type_node,
8694 build1 (INDIRECT_REF, cst_uchar_node,
8695 fold_convert_loc (loc,
8700 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8701 if (p1 && *p1 == '\0')
8703 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8704 tree cst_uchar_ptr_node
8705 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8708 = fold_convert_loc (loc, integer_type_node,
8709 build1 (INDIRECT_REF, cst_uchar_node,
8710 fold_convert_loc (loc,
8713 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8719 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8720 Return NULL_TREE if no simplification can be made. */
8723 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8725 const char *p1, *p2;
8727 if (!validate_arg (arg1, POINTER_TYPE)
8728 || !validate_arg (arg2, POINTER_TYPE)
8729 || !validate_arg (len, INTEGER_TYPE))
8732 /* If the LEN parameter is zero, return zero. */
8733 if (integer_zerop (len))
8734 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8737 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8738 if (operand_equal_p (arg1, arg2, 0))
8739 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8741 p1 = c_getstr (arg1);
8742 p2 = c_getstr (arg2);
/* All three constant: evaluate on the host, normalized to -1/0/1.  */
8744 if (host_integerp (len, 1) && p1 && p2)
8746 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8748 return integer_one_node;
8750 return integer_minus_one_node;
8752 return integer_zero_node;
8755 /* If the second arg is "", and the length is greater than zero,
8756 return *(const unsigned char*)arg1. */
8757 if (p2 && *p2 == '\0'
8758 && TREE_CODE (len) == INTEGER_CST
8759 && tree_int_cst_sgn (len) == 1)
8761 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8762 tree cst_uchar_ptr_node
8763 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8765 return fold_convert_loc (loc, integer_type_node,
8766 build1 (INDIRECT_REF, cst_uchar_node,
8767 fold_convert_loc (loc,
8772 /* If the first arg is "", and the length is greater than zero,
8773 return -*(const unsigned char*)arg2. */
8774 if (p1 && *p1 == '\0'
8775 && TREE_CODE (len) == INTEGER_CST
8776 && tree_int_cst_sgn (len) == 1)
8778 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8779 tree cst_uchar_ptr_node
8780 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8782 tree temp = fold_convert_loc (loc, integer_type_node,
8783 build1 (INDIRECT_REF, cst_uchar_node,
8784 fold_convert_loc (loc,
8787 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8790 /* If len parameter is one, return an expression corresponding to
8791 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8792 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8794 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8795 tree cst_uchar_ptr_node
8796 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8798 tree ind1 = fold_convert_loc (loc, integer_type_node,
8799 build1 (INDIRECT_REF, cst_uchar_node,
8800 fold_convert_loc (loc,
8803 tree ind2 = fold_convert_loc (loc, integer_type_node,
8804 build1 (INDIRECT_REF, cst_uchar_node,
8805 fold_convert_loc (loc,
8808 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8814 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8815 ARG. Return NULL_TREE if no simplification can be made. */
8818 fold_builtin_signbit (location_t loc, tree arg, tree type)
8822 if (!validate_arg (arg, REAL_TYPE))
8825 /* If ARG is a compile-time constant, determine the result. */
8826 if (TREE_CODE (arg) == REAL_CST
8827 && !TREE_OVERFLOW (arg))
8831 c = TREE_REAL_CST (arg);
8832 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8833 return fold_convert_loc (loc, type, temp);
8836 /* If ARG is non-negative, the result is always zero. */
8837 if (tree_expr_nonnegative_p (arg))
8838 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8840 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8841 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8842 return fold_build2_loc (loc, LT_EXPR, type, arg,
8843 build_real (TREE_TYPE (arg), dconst0));
8848 /* Fold function call to builtin copysign, copysignf or copysignl with
8849 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8853 fold_builtin_copysign (location_t loc, tree fndecl,
8854 tree arg1, tree arg2, tree type)
8858 if (!validate_arg (arg1, REAL_TYPE)
8859 || !validate_arg (arg2, REAL_TYPE))
8862 /* copysign(X,X) is X. */
8863 if (operand_equal_p (arg1, arg2, 0))
8864 return fold_convert_loc (loc, type, arg1);
8866 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8867 if (TREE_CODE (arg1) == REAL_CST
8868 && TREE_CODE (arg2) == REAL_CST
8869 && !TREE_OVERFLOW (arg1)
8870 && !TREE_OVERFLOW (arg2))
8872 REAL_VALUE_TYPE c1, c2;
8874 c1 = TREE_REAL_CST (arg1);
8875 c2 = TREE_REAL_CST (arg2);
8876 /* c1.sign := c2.sign. */
8877 real_copysign (&c1, &c2);
8878 return build_real (type, c1);
8881 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8882 Remember to evaluate Y for side-effects. */
8883 if (tree_expr_nonnegative_p (arg2))
8884 return omit_one_operand_loc (loc, type,
8885 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8888 /* Strip sign changing operations for the first argument. */
/* copysign overwrites ARG1's sign, so e.g. a negation on ARG1 is dead.  */
8889 tem = fold_strip_sign_ops (arg1);
8891 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8896 /* Fold a call to builtin isascii with argument ARG. */
8899 fold_builtin_isascii (location_t loc, tree arg)
8901 if (!validate_arg (arg, INTEGER_TYPE))
8905 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8906 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8907 build_int_cst (NULL_TREE,
8908 ~ (unsigned HOST_WIDE_INT) 0x7f))
8909 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8910 arg, integer_zero_node);
8914 /* Fold a call to builtin toascii with argument ARG. */
8917 fold_builtin_toascii (location_t loc, tree arg)
8919 if (!validate_arg (arg, INTEGER_TYPE))
8922 /* Transform toascii(c) -> (c & 0x7f). */
8923 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8924 build_int_cst (NULL_TREE, 0x7f));
8927 /* Fold a call to builtin isdigit with argument ARG. */
8930 fold_builtin_isdigit (location_t loc, tree arg)
8932 if (!validate_arg (arg, INTEGER_TYPE))
8936 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8937 /* According to the C standard, isdigit is unaffected by locale.
8938 However, it definitely is affected by the target character set. */
8939 unsigned HOST_WIDE_INT target_digit0
8940 = lang_hooks.to_target_charset ('0');
/* to_target_charset returning 0 means the mapping is unknown; bail.  */
8942 if (target_digit0 == 0)
/* Unsigned subtraction makes c < '0' wrap to a huge value, so a
   single <= 9 comparison covers both bounds.  */
8945 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8946 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8947 build_int_cst (unsigned_type_node, target_digit0));
8948 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8949 build_int_cst (unsigned_type_node, 9));
8953 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8956 fold_builtin_fabs (location_t loc, tree arg, tree type)
8958 if (!validate_arg (arg, REAL_TYPE))
8961 arg = fold_convert_loc (loc, type, arg);
/* Constant-fold fabs of a literal; otherwise build an ABS_EXPR.  */
8962 if (TREE_CODE (arg) == REAL_CST)
8963 return fold_abs_const (arg, type);
8964 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8967 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8970 fold_builtin_abs (location_t loc, tree arg, tree type)
8972 if (!validate_arg (arg, INTEGER_TYPE))
8975 arg = fold_convert_loc (loc, type, arg);
/* Constant-fold abs of a literal; otherwise build an ABS_EXPR.  */
8976 if (TREE_CODE (arg) == INTEGER_CST)
8977 return fold_abs_const (arg, type);
8978 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8981 /* Fold a call to builtin fmin or fmax. MAX selects fmax over fmin. */
8984 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8985 tree type, bool max)
8987 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8989 /* Calculate the result when the argument is a constant. */
8990 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8995 /* If either argument is NaN, return the other one. Avoid the
8996 transformation if we get (and honor) a signalling NaN. Using
8997 omit_one_operand() ensures we create a non-lvalue. */
8998 if (TREE_CODE (arg0) == REAL_CST
8999 && real_isnan (&TREE_REAL_CST (arg0))
9000 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9001 || ! TREE_REAL_CST (arg0).signalling))
9002 return omit_one_operand_loc (loc, type, arg1, arg0);
9003 if (TREE_CODE (arg1) == REAL_CST
9004 && real_isnan (&TREE_REAL_CST (arg1))
9005 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9006 || ! TREE_REAL_CST (arg1).signalling))
9007 return omit_one_operand_loc (loc, type, arg0, arg1);
9009 /* Transform fmin/fmax(x,x) -> x. */
9010 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9011 return omit_one_operand_loc (loc, type, arg0, arg1);
9013 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9014 functions to return the numeric arg if the other one is NaN.
9015 These tree codes don't honor that, so only transform if
9016 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9017 handled, so we don't have to worry about it either. */
9018 if (flag_finite_math_only)
9019 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9020 fold_convert_loc (loc, type, arg0),
9021 fold_convert_loc (loc, type, arg1));
9026 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9029 fold_builtin_carg (location_t loc, tree arg, tree type)
9031 if (validate_arg (arg, COMPLEX_TYPE)
9032 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9034 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
/* Save ARG so extracting both real and imaginary parts does not
   evaluate it twice.  */
9038 tree new_arg = builtin_save_expr (arg);
9039 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9040 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
/* carg(a+bi) == atan2(b, a) — note imaginary part first.  */
9041 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9048 /* Fold a call to builtin logb/ilogb. RETTYPE distinguishes the two:
   REAL_TYPE for logb, integer for ilogb. */
9051 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9053 if (! validate_arg (arg, REAL_TYPE))
9058 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9060 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9066 /* If arg is Inf or NaN and we're logb, return it. */
9067 if (TREE_CODE (rettype) == REAL_TYPE)
9068 return fold_convert_loc (loc, rettype, arg);
9069 /* Fall through... */
9071 /* Zero may set errno and/or raise an exception for logb, also
9072 for ilogb we don't know FP_ILOGB0. */
9075 /* For normal numbers, proceed iff radix == 2. In GCC,
9076 normalized significands are in the range [0.5, 1.0). We
9077 want the exponent as if they were [1.0, 2.0) so get the
9078 exponent and subtract 1. */
9079 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9080 return fold_convert_loc (loc, rettype,
9081 build_int_cst (NULL_TREE,
9082 REAL_EXP (value)-1));
9090 /* Fold a call to builtin significand, if radix == 2. */
9093 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9095 if (! validate_arg (arg, REAL_TYPE))
9100 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9102 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9109 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9110 return fold_convert_loc (loc, rettype, arg);
9112 /* For normal numbers, proceed iff radix == 2. */
9113 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9115 REAL_VALUE_TYPE result = *value;
9116 /* In GCC, normalized significands are in the range [0.5,
9117 1.0). We want them to be [1.0, 2.0) so set the
9119 SET_REAL_EXP (&result, 1);
9120 return build_real (rettype, result);
9129 /* Fold a call to builtin frexp, we can assume the base is 2. */
9132 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9134 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
/* Only a non-overflowed real literal can be folded here.  */
9139 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9142 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9144 /* Proceed if a valid pointer type was passed in. */
9145 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9147 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9153 /* For +-0, return (*exp = 0, +-0). */
9154 exp = integer_zero_node;
9159 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9160 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9163 /* Since the frexp function always expects base 2, and in
9164 GCC normalized significands are already in the range
9165 [0.5, 1.0), we have exactly what frexp wants. */
9166 REAL_VALUE_TYPE frac_rvt = *value;
9167 SET_REAL_EXP (&frac_rvt, 0);
9168 frac = build_real (rettype, frac_rvt);
9169 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9176 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9177 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9178 TREE_SIDE_EFFECTS (arg1) = 1;
9179 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9185 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9186 then we can assume the base is two. If it's false, then we have to
9187 check the mode of the TYPE parameter in certain cases. */
9190 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9191 tree type, bool ldexp)
9193 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9198 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9199 if (real_zerop (arg0) || integer_zerop (arg1)
9200 || (TREE_CODE (arg0) == REAL_CST
9201 && !real_isfinite (&TREE_REAL_CST (arg0))))
9202 return omit_one_operand_loc (loc, type, arg0, arg1);
9204 /* If both arguments are constant, then try to evaluate it. */
9205 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9206 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9207 && host_integerp (arg1, 0))
9209 /* Bound the maximum adjustment to twice the range of the
9210 mode's valid exponents. Use abs to ensure the range is
9211 positive as a sanity check. */
9212 const long max_exp_adj = 2 *
9213 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9214 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9216 /* Get the user-requested adjustment. */
9217 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9219 /* The requested adjustment must be inside this range. This
9220 is a preliminary cap to avoid things like overflow, we
9221 may still fail to compute the result for other reasons. */
9222 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9224 REAL_VALUE_TYPE initial_result;
9226 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9228 /* Ensure we didn't overflow. */
9229 if (! real_isinf (&initial_result))
9231 const REAL_VALUE_TYPE trunc_result
9232 = real_value_truncate (TYPE_MODE (type), initial_result);
9234 /* Only proceed if the target mode can hold the
9236 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9237 return build_real (type, trunc_result);
9246 /* Fold a call to builtin modf. */
/* LOC is the call location, ARG0 the real operand, ARG1 the pointer that
   receives the integral part, and RETTYPE the result type.  Only folds a
   constant, non-overflowing real ARG0 stored through a pointer whose
   target matches RETTYPE.  */
9249 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9251 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9256 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
/* Build the lvalue *ARG1 so the integral part can be assigned below.  */
9259 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9261 /* Proceed if a valid pointer type was passed in. */
9262 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9264 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9265 REAL_VALUE_TYPE trunc, frac;
9271 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9272 trunc = frac = *value;
9275 /* For +-Inf, return (*arg1 = arg0, +-0). */
9277 frac.sign = value->sign;
9281 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9282 real_trunc (&trunc, VOIDmode, value);
9283 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9284 /* If the original number was negative and already
9285 integral, then the fractional part is -0.0. */
9286 if (value->sign && frac.cl == rvc_zero)
9287 frac.sign = value->sign;
9291 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
/* TREE_SIDE_EFFECTS keeps later folds from discarding the store.  */
9292 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9293 build_real (rettype, trunc));
9294 TREE_SIDE_EFFECTS (arg1) = 1;
9295 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9296 build_real (rettype, frac));
9302 /* Given a location LOC, an interclass builtin function decl FNDECL
9303 and its single argument ARG, return an folded expression computing
9304 the same, or NULL_TREE if we either couldn't or didn't want to fold
9305 (the latter happen if there's an RTL instruction available). */
9308 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9310 enum machine_mode mode;
9312 if (!validate_arg (arg, REAL_TYPE))
/* If the target provides an instruction for this classification, prefer
   expanding to RTL over the generic fold below.  */
9315 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9318 mode = TYPE_MODE (TREE_TYPE (arg));
9320 /* If there is no optab, try generic code. */
9321 switch (DECL_FUNCTION_CODE (fndecl))
9325 CASE_FLT_FN (BUILT_IN_ISINF):
9327 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9328 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9329 tree const type = TREE_TYPE (arg);
/* Build the largest finite value of MODE as a string and parse it.  */
9333 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9334 real_from_string (&r, buf);
9335 result = build_call_expr (isgr_fn, 2,
9336 fold_build1_loc (loc, ABS_EXPR, type, arg),
9337 build_real (type, r));
9340 CASE_FLT_FN (BUILT_IN_FINITE):
9341 case BUILT_IN_ISFINITE:
9343 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9344 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9345 tree const type = TREE_TYPE (arg);
9349 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9350 real_from_string (&r, buf);
9351 result = build_call_expr (isle_fn, 2,
9352 fold_build1_loc (loc, ABS_EXPR, type, arg),
9353 build_real (type, r));
9354 /*result = fold_build2_loc (loc, UNGT_EXPR,
9355 TREE_TYPE (TREE_TYPE (fndecl)),
9356 fold_build1_loc (loc, ABS_EXPR, type, arg),
9357 build_real (type, r));
9358 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9359 TREE_TYPE (TREE_TYPE (fndecl)),
9363 case BUILT_IN_ISNORMAL:
9365 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9366 islessequal(fabs(x),DBL_MAX). */
9367 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9368 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9369 tree const type = TREE_TYPE (arg);
9370 REAL_VALUE_TYPE rmax, rmin;
/* rmin is the smallest normal ("0x1p<emin-1>"), rmax the largest finite
   value of MODE.  */
9373 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9374 real_from_string (&rmax, buf);
9375 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9376 real_from_string (&rmin, buf);
/* Save fabs(arg) once; it is used by both comparisons below.  */
9377 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9378 result = build_call_expr (isle_fn, 2, arg,
9379 build_real (type, rmax));
9380 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9381 build_call_expr (isge_fn, 2, arg,
9382 build_real (type, rmin)));
9392 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9393 ARG is the argument for the call. */
/* LOC is the call location, FNDECL the builtin's decl, and BUILTIN_INDEX
   selects which classification (BUILT_IN_ISINF, BUILT_IN_ISINF_SIGN,
   BUILT_IN_ISFINITE or BUILT_IN_ISNAN) is folded.  */
9396 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9398 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9401 if (!validate_arg (arg, REAL_TYPE))
9404 switch (builtin_index)
9406 case BUILT_IN_ISINF:
/* Without infinities in the mode, isinf is statically false (keep ARG
   for its side effects).  */
9407 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9408 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9410 if (TREE_CODE (arg) == REAL_CST)
9412 r = TREE_REAL_CST (arg);
9413 if (real_isinf (&r))
9414 return real_compare (GT_EXPR, &r, &dconst0)
9415 ? integer_one_node : integer_minus_one_node;
9417 return integer_zero_node;
9422 case BUILT_IN_ISINF_SIGN:
9424 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9425 /* In a boolean context, GCC will fold the inner COND_EXPR to
9426 1. So e.g. "if (isinf_sign(x))" would be folded to just
9427 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9428 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9429 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9430 tree tmp = NULL_TREE;
/* ARG is used twice (signbit and isinf); save it to avoid double
   evaluation.  */
9432 arg = builtin_save_expr (arg);
9434 if (signbit_fn && isinf_fn)
9436 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9437 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
/* Normalize both calls to 0/1 booleans before combining them.  */
9439 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9440 signbit_call, integer_zero_node);
9441 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9442 isinf_call, integer_zero_node);
9444 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9445 integer_minus_one_node, integer_one_node);
9446 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9454 case BUILT_IN_ISFINITE:
/* With neither NaNs nor infinities honored, everything is finite.  */
9455 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9456 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9457 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9459 if (TREE_CODE (arg) == REAL_CST)
9461 r = TREE_REAL_CST (arg);
9462 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9467 case BUILT_IN_ISNAN:
9468 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9469 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9471 if (TREE_CODE (arg) == REAL_CST)
9473 r = TREE_REAL_CST (arg);
9474 return real_isnan (&r) ? integer_one_node : integer_zero_node;
/* Generic fallback: x != x (UNORDERED) is true exactly for NaN.  */
9477 arg = builtin_save_expr (arg);
9478 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9485 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9486 This builtin will generate code to return the appropriate floating
9487 point classification depending on the value of the floating point
9488 number passed in. The possible return values must be supplied as
9489 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9490 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9491 one floating point argument which is "type generic". */
9494 fold_builtin_fpclassify (location_t loc, tree exp)
9496 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9497 arg, type, res, tmp;
9498 enum machine_mode mode;
9502 /* Verify the required arguments in the original call. */
9503 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9504 INTEGER_TYPE, INTEGER_TYPE,
9505 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9508 fp_nan = CALL_EXPR_ARG (exp, 0);
9509 fp_infinite = CALL_EXPR_ARG (exp, 1);
9510 fp_normal = CALL_EXPR_ARG (exp, 2);
9511 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9512 fp_zero = CALL_EXPR_ARG (exp, 4);
9513 arg = CALL_EXPR_ARG (exp, 5);
9514 type = TREE_TYPE (arg);
9515 mode = TYPE_MODE (type);
/* Work on fabs(arg) saved once; every comparison below reuses it.  */
9516 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
/* The classification is built inside-out as a chain of COND_EXPRs:  */
9520 (fabs(x) == Inf ? FP_INFINITE :
9521 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9522 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9524 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9525 build_real (type, dconst0));
9526 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9527 tmp, fp_zero, fp_subnormal);
/* Smallest normal number of MODE: 0x1p<emin-1>.  */
9529 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9530 real_from_string (&r, buf);
9531 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9532 arg, build_real (type, r));
9533 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
/* Only test for infinity/NaN when the mode honors them.  */
9535 if (HONOR_INFINITIES (mode))
9538 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9539 build_real (type, r));
9540 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9544 if (HONOR_NANS (mode))
9546 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9547 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9553 /* Fold a call to an unordered comparison function such as
9554 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9555 being called and ARG0 and ARG1 are the arguments for the call.
9556 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9557 the opposite of the desired result. UNORDERED_CODE is used
9558 for modes that can hold NaNs and ORDERED_CODE is used for
9562 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9563 enum tree_code unordered_code,
9564 enum tree_code ordered_code)
9566 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9567 enum tree_code code;
9569 enum tree_code code0, code1;
9570 tree cmp_type = NULL_TREE;
9572 type0 = TREE_TYPE (arg0);
9573 type1 = TREE_TYPE (arg1);
9575 code0 = TREE_CODE (type0);
9576 code1 = TREE_CODE (type1);
/* Pick the common type both operands are converted to before comparing:
   the wider real type, or the real type when mixed with an integer.  */
9578 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9579 /* Choose the wider of two real types. */
9580 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9582 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9584 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9587 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9588 arg1 = fold_convert_loc (loc, cmp_type, arg1);
/* __builtin_isunordered itself: false when NaNs aren't honored.  */
9590 if (unordered_code == UNORDERED_EXPR)
9592 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9593 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9594 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
/* The supplied codes compute the OPPOSITE of the builtin's result, so
   wrap the comparison in a logical not.  */
9597 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9599 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9600 fold_build2_loc (loc, code, type, arg0, arg1));
9603 /* Fold a call to built-in function FNDECL with 0 arguments.
9604 IGNORE is true if the result of the function call is ignored. This
9605 function returns NULL_TREE if no simplification was possible. */
9608 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9610 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9611 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* inf()/huge_val() fold to a constant; the bool argument selects whether
   a warning is emitted when the mode cannot represent infinity.  */
9614 CASE_FLT_FN (BUILT_IN_INF):
9615 case BUILT_IN_INFD32:
9616 case BUILT_IN_INFD64:
9617 case BUILT_IN_INFD128:
9618 return fold_builtin_inf (loc, type, true);
9620 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9621 return fold_builtin_inf (loc, type, false);
9623 case BUILT_IN_CLASSIFY_TYPE:
/* With no argument the classified type is "no type" (NULL_TREE).  */
9624 return fold_builtin_classify_type (NULL_TREE);
9632 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9633 IGNORE is true if the result of the function call is ignored. This
9634 function returns NULL_TREE if no simplification was possible. */
/* Central one-argument dispatcher: each case delegates to a dedicated
   fold_builtin_* helper or to the MPFR/MPC constant-folding routines.
   Fix: removed a stray empty statement (doubled ';') after the
   BUILT_IN_CREAL return.  */
9637 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9639 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9640 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9644 case BUILT_IN_CONSTANT_P:
9646 tree val = fold_builtin_constant_p (arg0);
9648 /* Gimplification will pull the CALL_EXPR for the builtin out of
9649 an if condition. When not optimizing, we'll not CSE it back.
9650 To avoid link error types of regressions, return false now. */
9651 if (!val && !optimize)
9652 val = integer_zero_node;
9657 case BUILT_IN_CLASSIFY_TYPE:
9658 return fold_builtin_classify_type (arg0);
9660 case BUILT_IN_STRLEN:
9661 return fold_builtin_strlen (loc, arg0);
9663 CASE_FLT_FN (BUILT_IN_FABS):
9664 return fold_builtin_fabs (loc, arg0, type);
9668 case BUILT_IN_LLABS:
9669 case BUILT_IN_IMAXABS:
9670 return fold_builtin_abs (loc, arg0, type);
/* The complex builtins below all require a complex argument whose
   component type is real before folding.  */
9672 CASE_FLT_FN (BUILT_IN_CONJ):
9673 if (validate_arg (arg0, COMPLEX_TYPE)
9674 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9675 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9678 CASE_FLT_FN (BUILT_IN_CREAL):
9679 if (validate_arg (arg0, COMPLEX_TYPE)
9680 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9681 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9684 CASE_FLT_FN (BUILT_IN_CIMAG):
9685 if (validate_arg (arg0, COMPLEX_TYPE)
9686 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9687 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9690 CASE_FLT_FN (BUILT_IN_CCOS):
9691 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9693 CASE_FLT_FN (BUILT_IN_CCOSH):
9694 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9696 CASE_FLT_FN (BUILT_IN_CSIN):
9697 if (validate_arg (arg0, COMPLEX_TYPE)
9698 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9699 return do_mpc_arg1 (arg0, type, mpc_sin);
9702 CASE_FLT_FN (BUILT_IN_CSINH):
9703 if (validate_arg (arg0, COMPLEX_TYPE)
9704 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9705 return do_mpc_arg1 (arg0, type, mpc_sinh);
9708 CASE_FLT_FN (BUILT_IN_CTAN):
9709 if (validate_arg (arg0, COMPLEX_TYPE)
9710 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9711 return do_mpc_arg1 (arg0, type, mpc_tan);
9714 CASE_FLT_FN (BUILT_IN_CTANH):
9715 if (validate_arg (arg0, COMPLEX_TYPE)
9716 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9717 return do_mpc_arg1 (arg0, type, mpc_tanh);
9720 CASE_FLT_FN (BUILT_IN_CLOG):
9721 if (validate_arg (arg0, COMPLEX_TYPE)
9722 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9723 return do_mpc_arg1 (arg0, type, mpc_log);
9726 CASE_FLT_FN (BUILT_IN_CSQRT):
9727 if (validate_arg (arg0, COMPLEX_TYPE)
9728 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9729 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9732 CASE_FLT_FN (BUILT_IN_CASIN):
9733 if (validate_arg (arg0, COMPLEX_TYPE)
9734 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9735 return do_mpc_arg1 (arg0, type, mpc_asin);
9738 CASE_FLT_FN (BUILT_IN_CACOS):
9739 if (validate_arg (arg0, COMPLEX_TYPE)
9740 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9741 return do_mpc_arg1 (arg0, type, mpc_acos);
9744 CASE_FLT_FN (BUILT_IN_CATAN):
9745 if (validate_arg (arg0, COMPLEX_TYPE)
9746 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9747 return do_mpc_arg1 (arg0, type, mpc_atan);
9750 CASE_FLT_FN (BUILT_IN_CASINH):
9751 if (validate_arg (arg0, COMPLEX_TYPE)
9752 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9753 return do_mpc_arg1 (arg0, type, mpc_asinh);
9756 CASE_FLT_FN (BUILT_IN_CACOSH):
9757 if (validate_arg (arg0, COMPLEX_TYPE)
9758 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9759 return do_mpc_arg1 (arg0, type, mpc_acosh);
9762 CASE_FLT_FN (BUILT_IN_CATANH):
9763 if (validate_arg (arg0, COMPLEX_TYPE)
9764 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9765 return do_mpc_arg1 (arg0, type, mpc_atanh);
9768 CASE_FLT_FN (BUILT_IN_CABS):
9769 return fold_builtin_cabs (loc, arg0, type, fndecl);
9771 CASE_FLT_FN (BUILT_IN_CARG):
9772 return fold_builtin_carg (loc, arg0, type);
9774 CASE_FLT_FN (BUILT_IN_SQRT):
9775 return fold_builtin_sqrt (loc, arg0, type);
9777 CASE_FLT_FN (BUILT_IN_CBRT):
9778 return fold_builtin_cbrt (loc, arg0, type);
/* Real transcendental functions: do_mpfr_arg1's trailing arguments
   bound the domain ([lower, upper]) and whether bounds are inclusive.  */
9780 CASE_FLT_FN (BUILT_IN_ASIN):
9781 if (validate_arg (arg0, REAL_TYPE))
9782 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9783 &dconstm1, &dconst1, true);
9786 CASE_FLT_FN (BUILT_IN_ACOS):
9787 if (validate_arg (arg0, REAL_TYPE))
9788 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9789 &dconstm1, &dconst1, true);
9792 CASE_FLT_FN (BUILT_IN_ATAN):
9793 if (validate_arg (arg0, REAL_TYPE))
9794 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9797 CASE_FLT_FN (BUILT_IN_ASINH):
9798 if (validate_arg (arg0, REAL_TYPE))
9799 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9802 CASE_FLT_FN (BUILT_IN_ACOSH):
9803 if (validate_arg (arg0, REAL_TYPE))
9804 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9805 &dconst1, NULL, true);
9808 CASE_FLT_FN (BUILT_IN_ATANH):
9809 if (validate_arg (arg0, REAL_TYPE))
9810 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9811 &dconstm1, &dconst1, false);
9814 CASE_FLT_FN (BUILT_IN_SIN):
9815 if (validate_arg (arg0, REAL_TYPE))
9816 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9819 CASE_FLT_FN (BUILT_IN_COS):
9820 return fold_builtin_cos (loc, arg0, type, fndecl);
9822 CASE_FLT_FN (BUILT_IN_TAN):
9823 return fold_builtin_tan (arg0, type);
9825 CASE_FLT_FN (BUILT_IN_CEXP):
9826 return fold_builtin_cexp (loc, arg0, type);
9828 CASE_FLT_FN (BUILT_IN_CEXPI):
9829 if (validate_arg (arg0, REAL_TYPE))
9830 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9833 CASE_FLT_FN (BUILT_IN_SINH):
9834 if (validate_arg (arg0, REAL_TYPE))
9835 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9838 CASE_FLT_FN (BUILT_IN_COSH):
9839 return fold_builtin_cosh (loc, arg0, type, fndecl);
9841 CASE_FLT_FN (BUILT_IN_TANH):
9842 if (validate_arg (arg0, REAL_TYPE))
9843 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9846 CASE_FLT_FN (BUILT_IN_ERF):
9847 if (validate_arg (arg0, REAL_TYPE))
9848 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9851 CASE_FLT_FN (BUILT_IN_ERFC):
9852 if (validate_arg (arg0, REAL_TYPE))
9853 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9856 CASE_FLT_FN (BUILT_IN_TGAMMA):
9857 if (validate_arg (arg0, REAL_TYPE))
9858 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9861 CASE_FLT_FN (BUILT_IN_EXP):
9862 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9864 CASE_FLT_FN (BUILT_IN_EXP2):
9865 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9867 CASE_FLT_FN (BUILT_IN_EXP10):
9868 CASE_FLT_FN (BUILT_IN_POW10):
9869 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9871 CASE_FLT_FN (BUILT_IN_EXPM1):
9872 if (validate_arg (arg0, REAL_TYPE))
9873 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9876 CASE_FLT_FN (BUILT_IN_LOG):
9877 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9879 CASE_FLT_FN (BUILT_IN_LOG2):
9880 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9882 CASE_FLT_FN (BUILT_IN_LOG10):
9883 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9885 CASE_FLT_FN (BUILT_IN_LOG1P):
9886 if (validate_arg (arg0, REAL_TYPE))
9887 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9888 &dconstm1, NULL, false);
9891 CASE_FLT_FN (BUILT_IN_J0):
9892 if (validate_arg (arg0, REAL_TYPE))
9893 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9897 CASE_FLT_FN (BUILT_IN_J1):
9898 if (validate_arg (arg0, REAL_TYPE))
9899 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9903 CASE_FLT_FN (BUILT_IN_Y0):
9904 if (validate_arg (arg0, REAL_TYPE))
9905 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9906 &dconst0, NULL, false);
9909 CASE_FLT_FN (BUILT_IN_Y1):
9910 if (validate_arg (arg0, REAL_TYPE))
9911 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9912 &dconst0, NULL, false);
9915 CASE_FLT_FN (BUILT_IN_NAN):
9916 case BUILT_IN_NAND32:
9917 case BUILT_IN_NAND64:
9918 case BUILT_IN_NAND128:
9919 return fold_builtin_nan (arg0, type, true);
9921 CASE_FLT_FN (BUILT_IN_NANS):
9922 return fold_builtin_nan (arg0, type, false);
9924 CASE_FLT_FN (BUILT_IN_FLOOR):
9925 return fold_builtin_floor (loc, fndecl, arg0);
9927 CASE_FLT_FN (BUILT_IN_CEIL):
9928 return fold_builtin_ceil (loc, fndecl, arg0);
9930 CASE_FLT_FN (BUILT_IN_TRUNC):
9931 return fold_builtin_trunc (loc, fndecl, arg0);
9933 CASE_FLT_FN (BUILT_IN_ROUND):
9934 return fold_builtin_round (loc, fndecl, arg0);
9936 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9937 CASE_FLT_FN (BUILT_IN_RINT):
9938 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9940 CASE_FLT_FN (BUILT_IN_LCEIL):
9941 CASE_FLT_FN (BUILT_IN_LLCEIL):
9942 CASE_FLT_FN (BUILT_IN_LFLOOR):
9943 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9944 CASE_FLT_FN (BUILT_IN_LROUND):
9945 CASE_FLT_FN (BUILT_IN_LLROUND):
9946 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9948 CASE_FLT_FN (BUILT_IN_LRINT):
9949 CASE_FLT_FN (BUILT_IN_LLRINT):
9950 return fold_fixed_mathfn (loc, fndecl, arg0);
9952 case BUILT_IN_BSWAP32:
9953 case BUILT_IN_BSWAP64:
9954 return fold_builtin_bswap (fndecl, arg0);
9956 CASE_INT_FN (BUILT_IN_FFS):
9957 CASE_INT_FN (BUILT_IN_CLZ):
9958 CASE_INT_FN (BUILT_IN_CTZ):
9959 CASE_INT_FN (BUILT_IN_POPCOUNT):
9960 CASE_INT_FN (BUILT_IN_PARITY):
9961 return fold_builtin_bitop (fndecl, arg0);
9963 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9964 return fold_builtin_signbit (loc, arg0, type);
9966 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9967 return fold_builtin_significand (loc, arg0, type);
9969 CASE_FLT_FN (BUILT_IN_ILOGB):
9970 CASE_FLT_FN (BUILT_IN_LOGB):
9971 return fold_builtin_logb (loc, arg0, type);
9973 case BUILT_IN_ISASCII:
9974 return fold_builtin_isascii (loc, arg0);
9976 case BUILT_IN_TOASCII:
9977 return fold_builtin_toascii (loc, arg0);
9979 case BUILT_IN_ISDIGIT:
9980 return fold_builtin_isdigit (loc, arg0);
/* Classification builtins: try the constant fold first, then fall back
   to the interclass (comparison-based) expansion.  */
9982 CASE_FLT_FN (BUILT_IN_FINITE):
9983 case BUILT_IN_FINITED32:
9984 case BUILT_IN_FINITED64:
9985 case BUILT_IN_FINITED128:
9986 case BUILT_IN_ISFINITE:
9988 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9991 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9994 CASE_FLT_FN (BUILT_IN_ISINF):
9995 case BUILT_IN_ISINFD32:
9996 case BUILT_IN_ISINFD64:
9997 case BUILT_IN_ISINFD128:
9999 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10002 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10005 case BUILT_IN_ISNORMAL:
10006 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10008 case BUILT_IN_ISINF_SIGN:
10009 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10011 CASE_FLT_FN (BUILT_IN_ISNAN):
10012 case BUILT_IN_ISNAND32:
10013 case BUILT_IN_ISNAND64:
10014 case BUILT_IN_ISNAND128:
10015 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10017 case BUILT_IN_PRINTF:
10018 case BUILT_IN_PRINTF_UNLOCKED:
10019 case BUILT_IN_VPRINTF:
10020 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10030 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10031 IGNORE is true if the result of the function call is ignored. This
10032 function returns NULL_TREE if no simplification was possible. */
/* Two-argument dispatcher: each case forwards to a dedicated helper or
   to the MPFR/MPC constant-folding routines.  */
10035 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10037 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10038 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10042 CASE_FLT_FN (BUILT_IN_JN):
10043 if (validate_arg (arg0, INTEGER_TYPE)
10044 && validate_arg (arg1, REAL_TYPE))
10045 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10048 CASE_FLT_FN (BUILT_IN_YN):
10049 if (validate_arg (arg0, INTEGER_TYPE)
10050 && validate_arg (arg1, REAL_TYPE))
10051 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10055 CASE_FLT_FN (BUILT_IN_DREM):
10056 CASE_FLT_FN (BUILT_IN_REMAINDER):
10057 if (validate_arg (arg0, REAL_TYPE)
10058 && validate_arg(arg1, REAL_TYPE))
10059 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10062 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10063 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10064 if (validate_arg (arg0, REAL_TYPE)
10065 && validate_arg(arg1, POINTER_TYPE))
10066 return do_mpfr_lgamma_r (arg0, arg1, type);
10069 CASE_FLT_FN (BUILT_IN_ATAN2):
10070 if (validate_arg (arg0, REAL_TYPE)
10071 && validate_arg(arg1, REAL_TYPE))
10072 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10075 CASE_FLT_FN (BUILT_IN_FDIM):
10076 if (validate_arg (arg0, REAL_TYPE)
10077 && validate_arg(arg1, REAL_TYPE))
10078 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10081 CASE_FLT_FN (BUILT_IN_HYPOT):
10082 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10084 CASE_FLT_FN (BUILT_IN_CPOW):
10085 if (validate_arg (arg0, COMPLEX_TYPE)
10086 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10087 && validate_arg (arg1, COMPLEX_TYPE)
10088 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10089 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10092 CASE_FLT_FN (BUILT_IN_LDEXP):
10093 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10094 CASE_FLT_FN (BUILT_IN_SCALBN):
10095 CASE_FLT_FN (BUILT_IN_SCALBLN):
10096 return fold_builtin_load_exponent (loc, arg0, arg1,
10097 type, /*ldexp=*/false);
10099 CASE_FLT_FN (BUILT_IN_FREXP):
10100 return fold_builtin_frexp (loc, arg0, arg1, type);
10102 CASE_FLT_FN (BUILT_IN_MODF):
10103 return fold_builtin_modf (loc, arg0, arg1, type);
10105 case BUILT_IN_BZERO:
10106 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10108 case BUILT_IN_FPUTS:
10109 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10111 case BUILT_IN_FPUTS_UNLOCKED:
10112 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10114 case BUILT_IN_STRSTR:
10115 return fold_builtin_strstr (loc, arg0, arg1, type);
10117 case BUILT_IN_STRCAT:
10118 return fold_builtin_strcat (loc, arg0, arg1);
10120 case BUILT_IN_STRSPN:
10121 return fold_builtin_strspn (loc, arg0, arg1);
10123 case BUILT_IN_STRCSPN:
10124 return fold_builtin_strcspn (loc, arg0, arg1);
10126 case BUILT_IN_STRCHR:
10127 case BUILT_IN_INDEX:
10128 return fold_builtin_strchr (loc, arg0, arg1, type);
10130 case BUILT_IN_STRRCHR:
10131 case BUILT_IN_RINDEX:
10132 return fold_builtin_strrchr (loc, arg0, arg1, type);
10134 case BUILT_IN_STRCPY:
10135 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10137 case BUILT_IN_STPCPY:
/* When the result is ignored, stpcpy degenerates to strcpy.  */
10140 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10144 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10147 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10150 case BUILT_IN_STRCMP:
10151 return fold_builtin_strcmp (loc, arg0, arg1);
10153 case BUILT_IN_STRPBRK:
10154 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10156 case BUILT_IN_EXPECT:
10157 return fold_builtin_expect (loc, arg0, arg1);
10159 CASE_FLT_FN (BUILT_IN_POW):
10160 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10162 CASE_FLT_FN (BUILT_IN_POWI):
10163 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10165 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10166 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10168 CASE_FLT_FN (BUILT_IN_FMIN):
10169 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10171 CASE_FLT_FN (BUILT_IN_FMAX):
10172 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
/* The unordered comparisons pass the INVERSE tree codes; see
   fold_builtin_unordered_cmp.  */
10174 case BUILT_IN_ISGREATER:
10175 return fold_builtin_unordered_cmp (loc, fndecl,
10176 arg0, arg1, UNLE_EXPR, LE_EXPR);
10177 case BUILT_IN_ISGREATEREQUAL:
10178 return fold_builtin_unordered_cmp (loc, fndecl,
10179 arg0, arg1, UNLT_EXPR, LT_EXPR);
10180 case BUILT_IN_ISLESS:
10181 return fold_builtin_unordered_cmp (loc, fndecl,
10182 arg0, arg1, UNGE_EXPR, GE_EXPR);
10183 case BUILT_IN_ISLESSEQUAL:
10184 return fold_builtin_unordered_cmp (loc, fndecl,
10185 arg0, arg1, UNGT_EXPR, GT_EXPR);
10186 case BUILT_IN_ISLESSGREATER:
10187 return fold_builtin_unordered_cmp (loc, fndecl,
10188 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10189 case BUILT_IN_ISUNORDERED:
10190 return fold_builtin_unordered_cmp (loc, fndecl,
10191 arg0, arg1, UNORDERED_EXPR,
10194 /* We do the folding for va_start in the expander. */
10195 case BUILT_IN_VA_START:
10198 case BUILT_IN_SPRINTF:
10199 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10201 case BUILT_IN_OBJECT_SIZE:
10202 return fold_builtin_object_size (arg0, arg1);
10204 case BUILT_IN_PRINTF:
10205 case BUILT_IN_PRINTF_UNLOCKED:
10206 case BUILT_IN_VPRINTF:
10207 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10209 case BUILT_IN_PRINTF_CHK:
10210 case BUILT_IN_VPRINTF_CHK:
/* The _chk flag argument must be a side-effect-free integer before it
   can be dropped.  */
10211 if (!validate_arg (arg0, INTEGER_TYPE)
10212 || TREE_SIDE_EFFECTS (arg0))
10215 return fold_builtin_printf (loc, fndecl,
10216 arg1, NULL_TREE, ignore, fcode);
10219 case BUILT_IN_FPRINTF:
10220 case BUILT_IN_FPRINTF_UNLOCKED:
10221 case BUILT_IN_VFPRINTF:
10222 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10231 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10232 and ARG2. IGNORE is true if the result of the function call is ignored.
10233 This function returns NULL_TREE if no simplification was possible. */
/* Three-argument dispatcher, forwarding to the per-builtin helpers.
   Fix: removed a stray empty statement (doubled ';') after the
   fold_builtin_memcmp return.  */
10236 fold_builtin_3 (location_t loc, tree fndecl,
10237 tree arg0, tree arg1, tree arg2, bool ignore)
10239 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10240 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10244 CASE_FLT_FN (BUILT_IN_SINCOS):
10245 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10247 CASE_FLT_FN (BUILT_IN_FMA):
10248 if (validate_arg (arg0, REAL_TYPE)
10249 && validate_arg(arg1, REAL_TYPE)
10250 && validate_arg(arg2, REAL_TYPE))
10251 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10254 CASE_FLT_FN (BUILT_IN_REMQUO):
10255 if (validate_arg (arg0, REAL_TYPE)
10256 && validate_arg(arg1, REAL_TYPE)
10257 && validate_arg(arg2, POINTER_TYPE))
10258 return do_mpfr_remquo (arg0, arg1, arg2);
10261 case BUILT_IN_MEMSET:
10262 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
/* bcopy(src, dst, n) swaps the operand order relative to memmove.  */
10264 case BUILT_IN_BCOPY:
10265 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10266 void_type_node, true, /*endp=*/3);
10268 case BUILT_IN_MEMCPY:
10269 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10270 type, ignore, /*endp=*/0);
10272 case BUILT_IN_MEMPCPY:
10273 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10274 type, ignore, /*endp=*/1);
10276 case BUILT_IN_MEMMOVE:
10277 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10278 type, ignore, /*endp=*/3);
10280 case BUILT_IN_STRNCAT:
10281 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10283 case BUILT_IN_STRNCPY:
10284 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10286 case BUILT_IN_STRNCMP:
10287 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10289 case BUILT_IN_MEMCHR:
10290 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10292 case BUILT_IN_BCMP:
10293 case BUILT_IN_MEMCMP:
10294 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10296 case BUILT_IN_SPRINTF:
10297 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10299 case BUILT_IN_STRCPY_CHK:
10300 case BUILT_IN_STPCPY_CHK:
10301 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10304 case BUILT_IN_STRCAT_CHK:
10305 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10307 case BUILT_IN_PRINTF_CHK:
10308 case BUILT_IN_VPRINTF_CHK:
/* The _chk flag argument must be a side-effect-free integer before it
   can be dropped.  */
10309 if (!validate_arg (arg0, INTEGER_TYPE)
10310 || TREE_SIDE_EFFECTS (arg0))
10313 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10316 case BUILT_IN_FPRINTF:
10317 case BUILT_IN_FPRINTF_UNLOCKED:
10318 case BUILT_IN_VFPRINTF:
10319 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10322 case BUILT_IN_FPRINTF_CHK:
10323 case BUILT_IN_VFPRINTF_CHK:
10324 if (!validate_arg (arg1, INTEGER_TYPE)
10325 || TREE_SIDE_EFFECTS (arg1))
10328 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10337 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10338 ARG2, and ARG3. IGNORE is true if the result of the function call is
10339 ignored. This function returns NULL_TREE if no simplification was
/* NOTE(review): excerpt elided — return type, opening brace, `switch`
   header, breaks, default case and closing brace are missing here.
   Dispatches the 4-argument builtins to their folding helpers.  */
10343 fold_builtin_4 (location_t loc, tree fndecl,
10344 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10346 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* The _CHK memory builtins share one helper, distinguished by the
   function code passed as the last argument.  */
10350 case BUILT_IN_MEMCPY_CHK:
10351 case BUILT_IN_MEMPCPY_CHK:
10352 case BUILT_IN_MEMMOVE_CHK:
10353 case BUILT_IN_MEMSET_CHK:
10354 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10356 DECL_FUNCTION_CODE (fndecl));
10358 case BUILT_IN_STRNCPY_CHK:
10359 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10361 case BUILT_IN_STRNCAT_CHK:
10362 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
/* fprintf_chk: arg1 is the flag argument; don't fold if it isn't a plain
   side-effect-free integer.  */
10364 case BUILT_IN_FPRINTF_CHK:
10365 case BUILT_IN_VFPRINTF_CHK:
10366 if (!validate_arg (arg1, INTEGER_TYPE)
10367 || TREE_SIDE_EFFECTS (arg1))
10370 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10380 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10381 arguments, where NARGS <= 4. IGNORE is true if the result of the
10382 function call is ignored. This function returns NULL_TREE if no
10383 simplification was possible. Note that this only folds builtins with
10384 fixed argument patterns. Foldings that do varargs-to-varargs
10385 transformations, or that match calls with more than 4 arguments,
10386 need to be handled with fold_builtin_varargs instead. */
10388 #define MAX_ARGS_TO_FOLD_BUILTIN 4
/* NOTE(review): excerpt elided — return type, braces, and the
   `switch (nargs)` header with its case labels are missing.  Visible logic:
   dispatch to fold_builtin_0..fold_builtin_4 by argument count.  */
10391 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10393 tree ret = NULL_TREE;
10398 ret = fold_builtin_0 (loc, fndecl, ignore);
10401 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10404 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10407 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10410 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
/* Wrap a successful fold in a NOP_EXPR and set TREE_NO_WARNING so that
   removing the original call does not trigger spurious
   "statement with no effect"-style warnings later.  */
10418 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10419 SET_EXPR_LOCATION (ret, loc);
10420 TREE_NO_WARNING (ret) = 1;
10426 /* Builtins with folding operations that operate on "..." arguments
10427 need special handling; we need to store the arguments in a convenient
10428 data structure before attempting any folding. Fortunately there are
10429 only a few builtins that fall into this category. FNDECL is the
10430 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10431 result of the function call is ignored. */
/* NOTE(review): excerpt elided — return type, braces, `switch (fcode)`
   header, breaks, and default case are missing from this view.  */
10434 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10435 bool ignore ATTRIBUTE_UNUSED)
10437 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10438 tree ret = NULL_TREE;
10442 case BUILT_IN_SPRINTF_CHK:
10443 case BUILT_IN_VSPRINTF_CHK:
10444 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10447 case BUILT_IN_SNPRINTF_CHK:
10448 case BUILT_IN_VSNPRINTF_CHK:
10449 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10452 case BUILT_IN_FPCLASSIFY:
10453 ret = fold_builtin_fpclassify (loc, exp);
/* Same no-warning wrapping as fold_builtin_n: keep later passes from
   warning about the removed call.  */
10461 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10462 SET_EXPR_LOCATION (ret, loc);
10463 TREE_NO_WARNING (ret) = 1;
10469 /* Return true if FNDECL shouldn't be folded right now.
10470 If a built-in function has an inline attribute always_inline
10471 wrapper, defer folding it after always_inline functions have
10472 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10473 might not be performed. */
/* NOTE(review): the return-type line and braces are elided here.  All four
   conditions must hold: declared inline, disregarding inline limits,
   always_inline inlining not yet done, and carrying the attribute.  */
10476 avoid_folding_inline_builtin (tree fndecl)
10478 return (DECL_DECLARED_INLINE_P (fndecl)
10479 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10481 && !cfun->always_inline_functions_inlined
10482 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10485 /* A wrapper function for builtin folding that prevents warnings for
10486 "statement without effect" and the like, caused by removing the
10487 call node earlier than the warning is generated. */
/* NOTE(review): excerpt elided — return type, braces, and several guard
   lines are missing.  Visible flow: verify EXP calls a builtin with
   finalized arguments, then fold via fold_builtin_n or
   fold_builtin_varargs depending on argument count.  */
10490 fold_call_expr (location_t loc, tree exp, bool ignore)
10492 tree ret = NULL_TREE;
10493 tree fndecl = get_callee_fndecl (exp);
10495 && TREE_CODE (fndecl) == FUNCTION_DECL
10496 && DECL_BUILT_IN (fndecl)
10497 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10498 yet. Defer folding until we see all the arguments
10499 (after inlining). */
10500 && !CALL_EXPR_VA_ARG_PACK (exp))
10502 int nargs = call_expr_nargs (exp);
10504 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10505 instead last argument is __builtin_va_arg_pack (). Defer folding
10506 even in that case, until arguments are finalized. */
10507 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10509 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10511 && TREE_CODE (fndecl2) == FUNCTION_DECL
10512 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10513 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
/* Fortify-style always_inline builtin wrappers: fold only after they
   have been inlined (see avoid_folding_inline_builtin).  */
10517 if (avoid_folding_inline_builtin (fndecl))
10520 /* FIXME: Don't use a list in this interface. */
10521 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10522 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10525 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10527 tree *args = CALL_EXPR_ARGP (exp);
10528 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10531 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10539 /* Conveniently construct a function call expression. FNDECL names the
10540 function to be called and ARGLIST is a TREE_LIST of arguments. */
/* NOTE(review): return type, braces and the declaration of `i` are elided.
   Copies the TREE_LIST into a stack array (alloca) and delegates to
   fold_builtin_call_array.  */
10543 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10545 tree fntype = TREE_TYPE (fndecl);
10546 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10547 int n = list_length (arglist);
10548 tree *argarray = (tree *) alloca (n * sizeof (tree));
10551 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10552 argarray[i] = TREE_VALUE (arglist);
10553 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10556 /* Conveniently construct a function call expression. FNDECL names the
10557 function to be called, N is the number of arguments, and the "..."
10558 parameters are the argument expressions. */
/* NOTE(review): return type, braces, the va_list declaration, and the
   va_start/va_end calls are elided from this view.  Collects the variadic
   tree arguments into a stack array and folds the resulting call.  */
10561 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10564 tree fntype = TREE_TYPE (fndecl);
10565 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10566 tree *argarray = (tree *) alloca (n * sizeof (tree));
10570 for (i = 0; i < n; i++)
10571 argarray[i] = va_arg (ap, tree);
10573 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10576 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10577 N arguments are passed in the array ARGARRAY. */
/* NOTE(review): excerpt elided — return type, the remaining parameter
   declarations (n, argarray), braces and some guard lines are missing.
   Attempts to fold the builtin call; on any "defer" condition it just
   builds the plain CALL_EXPR instead.  */
10580 fold_builtin_call_array (location_t loc, tree type,
10585 tree ret = NULL_TREE;
10589 if (TREE_CODE (fn) == ADDR_EXPR)
10591 tree fndecl = TREE_OPERAND (fn, 0);
10592 if (TREE_CODE (fndecl) == FUNCTION_DECL
10593 && DECL_BUILT_IN (fndecl))
10595 /* If last argument is __builtin_va_arg_pack (), arguments to this
10596 function are not finalized yet. Defer folding until they are. */
10597 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10599 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10601 && TREE_CODE (fndecl2) == FUNCTION_DECL
10602 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10603 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10604 return build_call_array_loc (loc, type, fn, n, argarray);
10606 if (avoid_folding_inline_builtin (fndecl))
10607 return build_call_array_loc (loc, type, fn, n, argarray);
/* Target-specific (machine-dependent) builtins take a TREE_LIST, built
   here back-to-front so the list ends up in argument order.  */
10608 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10610 tree arglist = NULL_TREE;
10611 for (i = n - 1; i >= 0; i--)
10612 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10613 ret = targetm.fold_builtin (fndecl, arglist, false);
10616 return build_call_array_loc (loc, type, fn, n, argarray);
10618 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10620 /* First try the transformations that don't require consing up
10622 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
/* Fixed-pattern folding failed (or builtin is varargs): build the
   CALL_EXPR and give the varargs folder a chance.  */
10627 /* If we got this far, we need to build an exp. */
10628 exp = build_call_array_loc (loc, type, fn, n, argarray);
10629 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10630 return ret ? ret : exp;
10634 return build_call_array_loc (loc, type, fn, n, argarray);
10637 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10638 along with N new arguments specified as the "..." parameters. SKIP
10639 is the number of arguments in EXP to be omitted. This function is used
10640 to do varargs-to-varargs transformations. */
/* NOTE(review): elided here — return type, braces, declarations of
   buffer/i/j/ap, the va_start/va_end pair, and the `if (n > 0)` /
   `else` branch structure around the two buffer-filling paths.  */
10643 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10645 int oldnargs = call_expr_nargs (exp);
10646 int nargs = oldnargs - skip + n;
10647 tree fntype = TREE_TYPE (fndecl);
10648 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
/* New arguments first, then the surviving tail of EXP's arguments.  */
10656 buffer = XALLOCAVEC (tree, nargs);
10658 for (i = 0; i < n; i++)
10659 buffer[i] = va_arg (ap, tree);
10661 for (j = skip; j < oldnargs; j++, i++)
10662 buffer[i] = CALL_EXPR_ARG (exp, j);
/* Fast path (presumably when no new arguments are added): reuse EXP's
   own argument storage offset by SKIP — TODO confirm against full source.  */
10665 buffer = CALL_EXPR_ARGP (exp) + skip;
10667 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
10670 /* Validate a single argument ARG against a tree code CODE representing
/* NOTE(review): the return type, braces, and the leading null-check branch
   are elided.  POINTER_TYPE and INTEGER_TYPE are matched loosely (any
   pointer-ish / any integral type); all other codes require an exact
   TREE_CODE match on ARG's type.  */
10674 validate_arg (const_tree arg, enum tree_code code)
10678 else if (code == POINTER_TYPE)
10679 return POINTER_TYPE_P (TREE_TYPE (arg));
10680 else if (code == INTEGER_TYPE)
10681 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10682 return code == TREE_CODE (TREE_TYPE (arg));
10685 /* This function validates the types of a function call argument list
10686 against a specified list of tree_codes. If the last specifier is a 0,
10687 that represents an ellipses, otherwise the last specifier must be a
10690 This is the GIMPLE version of validate_arglist. Eventually we want to
10691 completely convert builtins.c to work from GIMPLEs and the tree based
10692 validate_arglist will then be removed. */
/* NOTE(review): elided — return type, braces, declarations of i/arg/res/ap,
   the `do { ... } while (1)` loop and its `switch (code)` skeleton, and the
   goto labels the final comment refers to.  */
10695 validate_gimple_arglist (const_gimple call, ...)
10697 enum tree_code code;
10703 va_start (ap, call);
/* Each variadic specifier arrives as an int and is one tree code.  */
10708 code = (enum tree_code) va_arg (ap, int);
10712 /* This signifies an ellipses, any further arguments are all ok. */
10716 /* This signifies an endlink, if no arguments remain, return
10717 true, otherwise return false. */
10718 res = (i == gimple_call_num_args (call));
10721 /* If no parameters remain or the parameter's code does not
10722 match the specified code, return false. Otherwise continue
10723 checking any remaining arguments. */
10724 arg = gimple_call_arg (call, i++);
10725 if (!validate_arg (arg, code))
10732 /* We need gotos here since we can only have one VA_CLOSE in a
10740 /* This function validates the types of a function call argument list
10741 against a specified list of tree_codes. If the last specifier is a 0,
10742 that represents an ellipses, otherwise the last specifier must be a
/* NOTE(review): elided — return type, braces, declarations of arg/res/ap,
   the enclosing loop and `switch (code)` skeleton, and the goto labels.
   Tree-CALL_EXPR twin of validate_gimple_arglist, iterating with a
   const_call_expr_arg_iterator.  */
10746 validate_arglist (const_tree callexpr, ...)
10748 enum tree_code code;
10751 const_call_expr_arg_iterator iter;
10754 va_start (ap, callexpr);
10755 init_const_call_expr_arg_iterator (callexpr, &iter);
10759 code = (enum tree_code) va_arg (ap, int);
10763 /* This signifies an ellipses, any further arguments are all ok. */
10767 /* This signifies an endlink, if no arguments remain, return
10768 true, otherwise return false. */
10769 res = !more_const_call_expr_args_p (&iter);
10772 /* If no parameters remain or the parameter's code does not
10773 match the specified code, return false. Otherwise continue
10774 checking any remaining arguments. */
10775 arg = next_const_call_expr_arg (&iter);
10776 if (!validate_arg (arg, code))
10783 /* We need gotos here since we can only have one VA_CLOSE in a
10791 /* Default target-specific builtin expander that does nothing. */
/* NOTE(review): the return type, braces and `return NULL_RTX;` (or
   equivalent) body are elided from this view.  */
10794 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10795 rtx target ATTRIBUTE_UNUSED,
10796 rtx subtarget ATTRIBUTE_UNUSED,
10797 enum machine_mode mode ATTRIBUTE_UNUSED,
10798 int ignore ATTRIBUTE_UNUSED)
10803 /* Returns true is EXP represents data that would potentially reside
10804 in a readonly section. */
/* NOTE(review): return type, braces, and the early-return / null-check
   lines are elided.  Only the address of a string constant, constructor,
   or static variable is considered; everything else is rejected because
   decl_readonly_section cannot be trusted on arbitrary trees.  */
10807 readonly_data_expr (tree exp)
10811 if (TREE_CODE (exp) != ADDR_EXPR)
10814 exp = get_base_address (TREE_OPERAND (exp, 0));
10818 /* Make sure we call decl_readonly_section only for trees it
10819 can handle (since it returns true for everything it doesn't
10821 if (TREE_CODE (exp) == STRING_CST
10822 || TREE_CODE (exp) == CONSTRUCTOR
10823 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10824 return decl_readonly_section (exp, 0);
10829 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10830 to the call, and TYPE is its return type.
10832 Return NULL_TREE if no simplification was possible, otherwise return the
10833 simplified form of the call as a tree.
10835 The simplified form may be a constant or other expression which
10836 computes the same value, but in a more efficient manner (including
10837 calls to other builtin functions).
10839 The call may contain arguments which need to be evaluated, but
10840 which are not useful to determine the result of the call. In
10841 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10842 COMPOUND_EXPR will be an argument which must be evaluated.
10843 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10844 COMPOUND_EXPR in the chain will contain the tree for the simplified
10845 form of the builtin function call. */
/* NOTE(review): excerpt elided — return type, braces, several local
   declarations (tem, fn), and intermediate control-flow lines are missing.
   Visible folds: both strings constant -> compute strstr at compile time;
   s2 == "" -> s1; s2 a single char -> strchr(s1, s2[0]).  */
10848 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10850 if (!validate_arg (s1, POINTER_TYPE)
10851 || !validate_arg (s2, POINTER_TYPE))
10856 const char *p1, *p2;
10858 p2 = c_getstr (s2);
10862 p1 = c_getstr (s1);
/* Both arguments are known string constants: evaluate with the host's
   strstr and fold to either NULL or an offset into S1.  */
10865 const char *r = strstr (p1, p2);
10869 return build_int_cst (TREE_TYPE (s1), 0);
10871 /* Return an offset into the constant string argument. */
10872 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10873 s1, size_int (r - p1));
10874 return fold_convert_loc (loc, type, tem);
10877 /* The argument is const char *, and the result is char *, so we need
10878 a type conversion here to avoid a warning. */
10880 return fold_convert_loc (loc, type, s1);
10885 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10889 /* New argument list transforming strstr(s1, s2) to
10890 strchr(s1, s2[0]). */
10891 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10895 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10896 the call, and TYPE is its return type.
10898 Return NULL_TREE if no simplification was possible, otherwise return the
10899 simplified form of the call as a tree.
10901 The simplified form may be a constant or other expression which
10902 computes the same value, but in a more efficient manner (including
10903 calls to other builtin functions).
10905 The call may contain arguments which need to be evaluated, but
10906 which are not useful to determine the result of the call. In
10907 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10908 COMPOUND_EXPR will be an argument which must be evaluated.
10909 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10910 COMPOUND_EXPR in the chain will contain the tree for the simplified
10911 form of the builtin function call. */
/* NOTE(review): excerpt elided — return type, braces, locals (p1, c, r,
   tem) and several return/guard lines are missing.  Visible fold: a
   constant string searched for a constant char is evaluated at compile
   time via the host strchr.  */
10914 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10916 if (!validate_arg (s1, POINTER_TYPE)
10917 || !validate_arg (s2, INTEGER_TYPE))
10923 if (TREE_CODE (s2) != INTEGER_CST)
10926 p1 = c_getstr (s1);
/* target_char_cast converts S2 to a host char; a nonzero return means the
   value doesn't fit a target character, so folding is abandoned.  */
10933 if (target_char_cast (s2, &c))
10936 r = strchr (p1, c);
10939 return build_int_cst (TREE_TYPE (s1), 0);
10941 /* Return an offset into the constant string argument. */
10942 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10943 s1, size_int (r - p1));
10944 return fold_convert_loc (loc, type, tem);
10950 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10951 the call, and TYPE is its return type.
10953 Return NULL_TREE if no simplification was possible, otherwise return the
10954 simplified form of the call as a tree.
10956 The simplified form may be a constant or other expression which
10957 computes the same value, but in a more efficient manner (including
10958 calls to other builtin functions).
10960 The call may contain arguments which need to be evaluated, but
10961 which are not useful to determine the result of the call. In
10962 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10963 COMPOUND_EXPR will be an argument which must be evaluated.
10964 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10965 COMPOUND_EXPR in the chain will contain the tree for the simplified
10966 form of the builtin function call. */
/* NOTE(review): excerpt elided — return type, braces, locals and guard
   lines are missing.  Visible folds: constant string + constant char ->
   compile-time strrchr; searching for '\0' -> plain strchr (first and last
   occurrence of the terminator coincide).  */
10969 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10971 if (!validate_arg (s1, POINTER_TYPE)
10972 || !validate_arg (s2, INTEGER_TYPE))
10979 if (TREE_CODE (s2) != INTEGER_CST)
10982 p1 = c_getstr (s1);
10989 if (target_char_cast (s2, &c))
10992 r = strrchr (p1, c);
10995 return build_int_cst (TREE_TYPE (s1), 0);
10997 /* Return an offset into the constant string argument. */
10998 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10999 s1, size_int (r - p1));
11000 return fold_convert_loc (loc, type, tem);
/* Non-constant string: only the '\0' case can be strength-reduced.  */
11003 if (! integer_zerop (s2))
11006 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11010 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11011 return build_call_expr_loc (loc, fn, 2, s1, s2);
11015 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11016 to the call, and TYPE is its return type.
11018 Return NULL_TREE if no simplification was possible, otherwise return the
11019 simplified form of the call as a tree.
11021 The simplified form may be a constant or other expression which
11022 computes the same value, but in a more efficient manner (including
11023 calls to other builtin functions).
11025 The call may contain arguments which need to be evaluated, but
11026 which are not useful to determine the result of the call. In
11027 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11028 COMPOUND_EXPR will be an argument which must be evaluated.
11029 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11030 COMPOUND_EXPR in the chain will contain the tree for the simplified
11031 form of the builtin function call. */
/* NOTE(review): excerpt elided — return type, braces, locals (fn, tem) and
   branch structure are missing.  Visible folds: both strings constant ->
   compile-time strpbrk; s2 == "" -> NULL (still evaluating s1); s2 one
   char -> strchr(s1, s2[0]).  */
11034 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11036 if (!validate_arg (s1, POINTER_TYPE)
11037 || !validate_arg (s2, POINTER_TYPE))
11042 const char *p1, *p2;
11044 p2 = c_getstr (s2);
11048 p1 = c_getstr (s1);
11051 const char *r = strpbrk (p1, p2);
11055 return build_int_cst (TREE_TYPE (s1), 0);
11057 /* Return an offset into the constant string argument. */
11058 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11059 s1, size_int (r - p1));
11060 return fold_convert_loc (loc, type, tem);
11064 /* strpbrk(x, "") == NULL.
11065 Evaluate and ignore s1 in case it had side-effects. */
11066 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11069 return NULL_TREE; /* Really call strpbrk. */
11071 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11075 /* New argument list transforming strpbrk(s1, s2) to
11076 strchr(s1, s2[0]). */
11077 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11081 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11084 Return NULL_TREE if no simplification was possible, otherwise return the
11085 simplified form of the call as a tree.
11087 The simplified form may be a constant or other expression which
11088 computes the same value, but in a more efficient manner (including
11089 calls to other builtin functions).
11091 The call may contain arguments which need to be evaluated, but
11092 which are not useful to determine the result of the call. In
11093 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11094 COMPOUND_EXPR will be an argument which must be evaluated.
11095 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11096 COMPOUND_EXPR in the chain will contain the tree for the simplified
11097 form of the builtin function call. */
/* NOTE(review): excerpt elided — return type, braces, locals (newdst,
   call) and the return for the empty-source case are missing.  Visible
   folds: src == "" -> dst; otherwise, when optimizing for speed and the
   source length is a side-effect-free constant, rewrite
   strcat(dst, src) as strcpy(dst + strlen(dst), src), returning dst.  */
11100 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11102 if (!validate_arg (dst, POINTER_TYPE)
11103 || !validate_arg (src, POINTER_TYPE))
11107 const char *p = c_getstr (src);
11109 /* If the string length is zero, return the dst parameter. */
11110 if (p && *p == '\0')
11113 if (optimize_insn_for_speed_p ())
11115 /* See if we can store by pieces into (dst + strlen(dst)). */
11117 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11118 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11120 if (!strlen_fn || !strcpy_fn)
11123 /* If we don't have a movstr we don't want to emit an strcpy
11124 call. We have to do that if the length of the source string
11125 isn't computable (in that case we can use memcpy probably
11126 later expanding to a sequence of mov instructions). If we
11127 have movstr instructions we can emit strcpy calls. */
11130 tree len = c_strlen (src, 1);
11131 if (! len || TREE_SIDE_EFFECTS (len))
11135 /* Stabilize the argument list. */
/* builtin_save_expr ensures DST is evaluated exactly once even though
   it is used three times below.  */
11136 dst = builtin_save_expr (dst);
11138 /* Create strlen (dst). */
11139 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11140 /* Create (dst p+ strlen (dst)). */
11142 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11143 TREE_TYPE (dst), dst, newdst);
11144 newdst = builtin_save_expr (newdst);
/* COMPOUND_EXPR: perform the strcpy, then yield DST as the result —
   matching strcat's return-value contract.  */
11146 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11147 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11153 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11154 arguments to the call.
11156 Return NULL_TREE if no simplification was possible, otherwise return the
11157 simplified form of the call as a tree.
11159 The simplified form may be a constant or other expression which
11160 computes the same value, but in a more efficient manner (including
11161 calls to other builtin functions).
11163 The call may contain arguments which need to be evaluated, but
11164 which are not useful to determine the result of the call. In
11165 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11166 COMPOUND_EXPR will be an argument which must be evaluated.
11167 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11168 COMPOUND_EXPR in the chain will contain the tree for the simplified
11169 form of the builtin function call. */
/* NOTE(review): excerpt elided — return type, braces, and the final
   NULL_TREE return are missing.  Visible folds: len == 0 or src == "" ->
   dst; len >= strlen(src) (both constant) -> plain strcat.  */
11172 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11174 if (!validate_arg (dst, POINTER_TYPE)
11175 || !validate_arg (src, POINTER_TYPE)
11176 || !validate_arg (len, INTEGER_TYPE))
11180 const char *p = c_getstr (src);
11182 /* If the requested length is zero, or the src parameter string
11183 length is zero, return the dst parameter. */
/* omit_two_operands keeps SRC and LEN in the tree so any side effects in
   them are still evaluated.  */
11184 if (integer_zerop (len) || (p && *p == '\0'))
11185 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11187 /* If the requested len is greater than or equal to the string
11188 length, call strcat. */
11189 if (TREE_CODE (len) == INTEGER_CST && p
11190 && compare_tree_int (len, strlen (p)) >= 0)
11192 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11194 /* If the replacement _DECL isn't initialized, don't do the
11199 return build_call_expr_loc (loc, fn, 2, dst, src);
11205 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11208 Return NULL_TREE if no simplification was possible, otherwise return the
11209 simplified form of the call as a tree.
11211 The simplified form may be a constant or other expression which
11212 computes the same value, but in a more efficient manner (including
11213 calls to other builtin functions).
11215 The call may contain arguments which need to be evaluated, but
11216 which are not useful to determine the result of the call. In
11217 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11218 COMPOUND_EXPR will be an argument which must be evaluated.
11219 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11220 COMPOUND_EXPR in the chain will contain the tree for the simplified
11221 form of the builtin function call. */
/* NOTE(review): excerpt elided — return type, braces, the `if (p1 && p2)`
   guard and the trailing NULL_TREE return appear to be missing.  Visible
   folds: both strings constant -> compile-time strspn; either string "" ->
   0 (keeping both operands for their side effects).  */
11224 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11226 if (!validate_arg (s1, POINTER_TYPE)
11227 || !validate_arg (s2, POINTER_TYPE))
11231 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11233 /* If both arguments are constants, evaluate at compile-time. */
11236 const size_t r = strspn (p1, p2);
11237 return size_int (r);
11240 /* If either argument is "", return NULL_TREE. */
11241 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11242 /* Evaluate and ignore both arguments in case either one has
11244 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11250 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11253 Return NULL_TREE if no simplification was possible, otherwise return the
11254 simplified form of the call as a tree.
11256 The simplified form may be a constant or other expression which
11257 computes the same value, but in a more efficient manner (including
11258 calls to other builtin functions).
11260 The call may contain arguments which need to be evaluated, but
11261 which are not useful to determine the result of the call. In
11262 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11263 COMPOUND_EXPR will be an argument which must be evaluated.
11264 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11265 COMPOUND_EXPR in the chain will contain the tree for the simplified
11266 form of the builtin function call. */
/* NOTE(review): excerpt elided — return type, braces, guards and the
   final NULL_TREE return are missing.  Visible folds: both constant ->
   compile-time strcspn; s1 == "" -> 0 (evaluating s2); s2 == "" ->
   strlen(s1).  */
11269 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11271 if (!validate_arg (s1, POINTER_TYPE)
11272 || !validate_arg (s2, POINTER_TYPE))
11276 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11278 /* If both arguments are constants, evaluate at compile-time. */
11281 const size_t r = strcspn (p1, p2);
11282 return size_int (r);
11285 /* If the first argument is "", return NULL_TREE. */
11286 if (p1 && *p1 == '\0')
11288 /* Evaluate and ignore argument s2 in case it has
11290 return omit_one_operand_loc (loc, size_type_node,
11291 size_zero_node, s2);
11294 /* If the second argument is "", return __builtin_strlen(s1). */
11295 if (p2 && *p2 == '\0')
11297 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11299 /* If the replacement _DECL isn't initialized, don't do the
11304 return build_call_expr_loc (loc, fn, 1, s1);
11310 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11311 to the call. IGNORE is true if the value returned
11312 by the builtin will be ignored. UNLOCKED is true is true if this
11313 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11314 the known length of the string. Return NULL_TREE if no simplification
/* NOTE(review): excerpt elided — return type, braces, the IGNORE guard
   body, several returns, and the inner-block braces of the switch cases
   are missing.  Visible folds, keyed on the string's length: 0 -> drop the
   call (keep arg1 for side effects), 1 -> fputc, >1 -> fwrite (unless
   optimizing for size).  */
11318 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11319 bool ignore, bool unlocked, tree len)
11321 /* If we're using an unlocked function, assume the other unlocked
11322 functions exist explicitly. */
11323 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11324 : implicit_built_in_decls[BUILT_IN_FPUTC]
11325 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11326 : implicit_built_in_decls[BUILT_IN_FWRITE];
11328 /* If the return value is used, don't do the transformation. */
11332 /* Verify the arguments in the original call. */
11333 if (!validate_arg (arg0, POINTER_TYPE)
11334 || !validate_arg (arg1, POINTER_TYPE))
11338 len = c_strlen (arg0, 0);
11340 /* Get the length of the string passed to fputs. If the length
11341 can't be determined, punt. */
11343 || TREE_CODE (len) != INTEGER_CST)
/* compare_tree_int returns -1/0/1 for len <1 / ==1 / >1.  */
11346 switch (compare_tree_int (len, 1))
11348 case -1: /* length is 0, delete the call entirely . */
/* NOTE(review): another stray double semicolon below.  */
11349 return omit_one_operand_loc (loc, integer_type_node,
11350 integer_zero_node, arg1);;
11352 case 0: /* length is 1, call fputc. */
11354 const char *p = c_getstr (arg0);
11359 return build_call_expr_loc (loc, fn_fputc, 2,
11360 build_int_cst (NULL_TREE, p[0]), arg1);
11366 case 1: /* length is greater than 1, call fwrite. */
11368 /* If optimizing for size keep fputs. */
11369 if (optimize_function_for_size_p (cfun))
11371 /* New argument list transforming fputs(string, stream) to
11372 fwrite(string, 1, len, stream). */
11374 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11375 size_one_node, len, arg1);
11380 gcc_unreachable ();
11385 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11386 produced. False otherwise. This is done so that we don't output the error
11387 or warning twice or three times. */
/* NOTE(review): excerpt elided — return type, braces, several `return
   true;`/`return false;` lines, the `else` linking the nargs checks, and
   the declaration of `arg` are missing from this view.  */
11390 fold_builtin_next_arg (tree exp, bool va_start_p)
11392 tree fntype = TREE_TYPE (current_function_decl);
11393 int nargs = call_expr_nargs (exp);
/* va_start is only legal in a varargs function: a prototype whose last
   parameter type is void_type_node is a fixed-argument function.  */
11396 if (TYPE_ARG_TYPES (fntype) == 0
11397 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11398 == void_type_node))
11400 error ("%<va_start%> used in function with fixed args");
11406 if (va_start_p && (nargs != 2))
11408 error ("wrong number of arguments to function %<va_start%>");
11411 arg = CALL_EXPR_ARG (exp, 1);
11413 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11414 when we checked the arguments and if needed issued a warning. */
11419 /* Evidently an out of date version of <stdarg.h>; can't validate
11420 va_start's second argument, but can still work as intended. */
11421 warning (0, "%<__builtin_next_arg%> called without an argument");
11424 else if (nargs > 1)
11426 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11429 arg = CALL_EXPR_ARG (exp, 0);
11432 if (TREE_CODE (arg) == SSA_NAME)
11433 arg = SSA_NAME_VAR (arg);
11435 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11436 or __builtin_next_arg (0) the first time we see it, after checking
11437 the arguments and if needed issuing a warning. */
11438 if (!integer_zerop (arg))
11440 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11442 /* Strip off all nops for the sake of the comparison. This
11443 is not quite the same as STRIP_NOPS. It does more.
11444 We must also strip off INDIRECT_EXPR for C++ reference
11446 while (CONVERT_EXPR_P (arg)
11447 || TREE_CODE (arg) == INDIRECT_REF)
11448 arg = TREE_OPERAND (arg, 0);
11449 if (arg != last_parm)
11451 /* FIXME: Sometimes with the tree optimizers we can get the
11452 not the last argument even though the user used the last
11453 argument. We just warn and set the arg to be the last
11454 argument so that we will get wrong-code because of
11456 warning (0, "second parameter of %<va_start%> not last named argument");
11459 /* Undefined by C99 7.15.1.4p4 (va_start):
11460 "If the parameter parmN is declared with the register storage
11461 class, with a function or array type, or with a type that is
11462 not compatible with the type that results after application of
11463 the default argument promotions, the behavior is undefined."
11465 else if (DECL_REGISTER (arg))
11466 warning (0, "undefined behaviour when second parameter of "
11467 "%<va_start%> is declared with %<register%> storage");
11469 /* We want to verify the second parameter just once before the tree
11470 optimizers are run and then avoid keeping it in the tree,
11471 as otherwise we could warn even for correct code like:
11472 void foo (int i, ...)
11473 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
/* Which argument slot holds the checked parameter depends on whether this
   is va_start (slot 1) or next_arg (slot 0).  */
11475 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11477 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11483 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11484 ORIG may be null if this is a 2-argument call. We don't attempt to
11485 simplify calls with more than 3 arguments.
11487 Return NULL_TREE if no simplification was possible, otherwise return the
11488 simplified form of the call as a tree. If IGNORED is true, it means that
11489 the caller does not use the returned value of the function. */
/* Fold sprintf (DEST, FMT[, ORIG]) into strcpy when the format is a
   literal with no '%' directives, or "%s" with a known string argument;
   when the result length is known, return it via a COMPOUND_EXPR.  */
11492 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11493 tree orig, int ignored)
11496 const char *fmt_str = NULL;
11498 /* Verify the required arguments in the original call. We deal with two
11499 types of sprintf() calls: 'sprintf (str, fmt)' and
11500 'sprintf (dest, "%s", orig)'. */
11501 if (!validate_arg (dest, POINTER_TYPE)
11502 || !validate_arg (fmt, POINTER_TYPE))
11504 if (orig && !validate_arg (orig, POINTER_TYPE))
11507 /* Check whether the format is a literal string constant. */
11508 fmt_str = c_getstr (fmt);
11509 if (fmt_str == NULL)
11513 retval = NULL_TREE;
11515 if (!init_target_chars ())
11518 /* If the format doesn't contain % args or %%, use strcpy. */
11519 if (strchr (fmt_str, target_percent) == NULL)
11521 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11526 /* Don't optimize sprintf (buf, "abc", ptr++). */
11530 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11531 'format' is known to contain no % formats. */
11532 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
/* sprintf returns the number of characters written, which here is the
   literal format's length.  */
11534 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11537 /* If the format is "%s", use strcpy if the result isn't used. */
11538 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11541 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11546 /* Don't crash on sprintf (str1, "%s"). */
11550 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
/* Only a compile-time-constant length lets us supply the return value.  */
11553 retval = c_strlen (orig, 1);
11554 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11557 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11560 if (call && retval)
/* Convert the length to sprintf's declared return type (int).  */
11562 retval = fold_convert_loc
11563 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11565 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11571 /* Expand a call EXP to __builtin_object_size. */
/* Expand a call EXP to __builtin_object_size.  If the arguments are
   malformed, emit an error plus a trap; otherwise return the "unknown"
   answer for the requested type: (size_t)-1 for types 0/1, 0 for 2/3.  */
11574 expand_builtin_object_size (tree exp)
11577 int object_size_type;
11578 tree fndecl = get_callee_fndecl (exp);
11580 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11582 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11584 expand_builtin_trap ();
11588 ost = CALL_EXPR_ARG (exp, 1);
/* The second argument must be a constant in [0, 3].  */
11591 if (TREE_CODE (ost) != INTEGER_CST
11592 || tree_int_cst_sgn (ost) < 0
11593 || compare_tree_int (ost, 3) > 0)
11595 error ("%Klast argument of %D is not integer constant between 0 and 3",
11597 expand_builtin_trap ();
11601 object_size_type = tree_low_cst (ost, 0);
/* Types 0/1 are maximum-size queries (unknown => -1); 2/3 are
   minimum-size queries (unknown => 0).  */
11603 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11606 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11607 FCODE is the BUILT_IN_* to use.
11608 Return NULL_RTX if we failed; the caller should emit a normal call,
11609 otherwise try to get the result in TARGET, if convenient (and in
11610 mode MODE if that's convenient). */
/* Expand EXP, a call to __mem{cpy,pcpy,move,set}_chk, into the
   corresponding unchecked builtin when the length/size relationship is
   provably safe, warning when overflow is certain.  Return NULL_RTX to
   make the caller emit a normal library call.  */
11613 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11614 enum built_in_function fcode)
11616 tree dest, src, len, size;
/* For memset_chk the second argument is the fill byte (integer), not a
   pointer.  */
11618 if (!validate_arglist (exp,
11620 fcode == BUILT_IN_MEMSET_CHK
11621 ? INTEGER_TYPE : POINTER_TYPE,
11622 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11625 dest = CALL_EXPR_ARG (exp, 0);
11626 src = CALL_EXPR_ARG (exp, 1);
11627 len = CALL_EXPR_ARG (exp, 2);
11628 size = CALL_EXPR_ARG (exp, 3);
/* SIZE must be a known constant to reason about overflow at all.  */
11630 if (! host_integerp (size, 1))
/* SIZE == (size_t)-1 means "object size unknown": the check is a no-op.  */
11633 if (host_integerp (len, 1) || integer_all_onesp (size))
11637 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11639 warning_at (tree_nonartificial_location (exp),
11640 0, "%Kcall to %D will always overflow destination buffer",
11641 exp, get_callee_fndecl (exp))
11646 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11647 mem{cpy,pcpy,move,set} is available. */
11650 case BUILT_IN_MEMCPY_CHK:
11651 fn = built_in_decls[BUILT_IN_MEMCPY];
11653 case BUILT_IN_MEMPCPY_CHK:
11654 fn = built_in_decls[BUILT_IN_MEMPCPY];
11656 case BUILT_IN_MEMMOVE_CHK:
11657 fn = built_in_decls[BUILT_IN_MEMMOVE];
11659 case BUILT_IN_MEMSET_CHK:
11660 fn = built_in_decls[BUILT_IN_MEMSET];
/* Rebuild as the unchecked call, preserving tail-call status.  */
11669 fn = build_call_nofold (fn, 3, dest, src, len);
11670 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11671 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11672 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11674 else if (fcode == BUILT_IN_MEMSET_CHK)
11678 unsigned int dest_align
11679 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11681 /* If DEST is not a pointer type, call the normal function. */
11682 if (dest_align == 0)
11685 /* If SRC and DEST are the same (and not volatile), do nothing. */
11686 if (operand_equal_p (src, dest, 0))
11690 if (fcode != BUILT_IN_MEMPCPY_CHK)
11692 /* Evaluate and ignore LEN in case it has side-effects. */
11693 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11694 return expand_expr (dest, target, mode, EXPAND_NORMAL);
/* mempcpy returns DEST + LEN rather than DEST.  */
11697 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11698 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11701 /* __memmove_chk special case. */
11702 if (fcode == BUILT_IN_MEMMOVE_CHK)
11704 unsigned int src_align
11705 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11707 if (src_align == 0)
11710 /* If src is categorized for a readonly section we can use
11711 normal __memcpy_chk. */
/* Read-only SRC cannot overlap a writable DEST, so memcpy semantics
   are safe here.  */
11712 if (readonly_data_expr (src))
11714 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11717 fn = build_call_nofold (fn, 4, dest, src, len, size);
11718 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11719 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11720 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11727 /* Emit warning if a buffer overflow is detected at compile time. */
/* Emit a compile-time buffer-overflow warning for a str*_chk or
   *snprintf_chk call EXP when the copied length provably exceeds the
   destination object size.  FCODE selects which argument slots hold
   the length and size.  */
11730 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11734 location_t loc = tree_nonartificial_location (exp);
11738 case BUILT_IN_STRCPY_CHK:
11739 case BUILT_IN_STPCPY_CHK:
11740 /* For __strcat_chk the warning will be emitted only if overflowing
11741 by at least strlen (dest) + 1 bytes. */
11742 case BUILT_IN_STRCAT_CHK:
11743 len = CALL_EXPR_ARG (exp, 1);
11744 size = CALL_EXPR_ARG (exp, 2);
11747 case BUILT_IN_STRNCAT_CHK:
11748 case BUILT_IN_STRNCPY_CHK:
11749 len = CALL_EXPR_ARG (exp, 2);
11750 size = CALL_EXPR_ARG (exp, 3);
11752 case BUILT_IN_SNPRINTF_CHK:
11753 case BUILT_IN_VSNPRINTF_CHK:
11754 len = CALL_EXPR_ARG (exp, 1);
11755 size = CALL_EXPR_ARG (exp, 3);
11758 gcc_unreachable ();
/* SIZE unknown at compile time or (size_t)-1: nothing to check.  */
11764 if (! host_integerp (size, 1) || integer_all_onesp (size))
/* For strcpy-like calls, LEN is the source string; get its length.  */
11769 len = c_strlen (len, 1);
11770 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11773 else if (fcode == BUILT_IN_STRNCAT_CHK)
11775 tree src = CALL_EXPR_ARG (exp, 1);
11776 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11778 src = c_strlen (src, 1);
11779 if (! src || ! host_integerp (src, 1))
/* Source length unknown but bound >= size: overflow possible, not
   certain, so use the weaker "might overflow" wording.  */
11781 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11782 exp, get_callee_fndecl (exp));
11785 else if (tree_int_cst_lt (src, size))
11788 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11791 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11792 exp, get_callee_fndecl (exp));
11795 /* Emit warning if a buffer overflow is detected at compile time
11796 in __sprintf_chk/__vsprintf_chk calls. */
/* Emit a compile-time overflow warning for __sprintf_chk/__vsprintf_chk
   call EXP when the formatted output length is provably >= the
   destination object size.  Only literal formats with no '%' (or a lone
   "%s" with a literal argument) are analyzable.  */
11799 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11801 tree size, len, fmt;
11802 const char *fmt_str;
11803 int nargs = call_expr_nargs (exp);
11805 /* Verify the required arguments in the original call. */
11809 size = CALL_EXPR_ARG (exp, 2);
11810 fmt = CALL_EXPR_ARG (exp, 3);
/* SIZE unknown or (size_t)-1: nothing to check.  */
11812 if (! host_integerp (size, 1) || integer_all_onesp (size))
11815 /* Check whether the format is a literal string constant. */
11816 fmt_str = c_getstr (fmt);
11817 if (fmt_str == NULL)
11820 if (!init_target_chars ())
11823 /* If the format doesn't contain % args or %%, we know its size. */
11824 if (strchr (fmt_str, target_percent) == 0)
11825 len = build_int_cstu (size_type_node, strlen (fmt_str));
11826 /* If the format is "%s" and first ... argument is a string literal,
11828 else if (fcode == BUILT_IN_SPRINTF_CHK
11829 && strcmp (fmt_str, target_percent_s) == 0)
11835 arg = CALL_EXPR_ARG (exp, 4);
11836 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11839 len = c_strlen (arg, 1);
11840 if (!len || ! host_integerp (len, 1))
/* LEN excludes the terminating NUL, so LEN >= SIZE already overflows.  */
11846 if (! tree_int_cst_lt (len, size))
11847 warning_at (tree_nonartificial_location (exp),
11848 0, "%Kcall to %D will always overflow destination buffer",
11849 exp, get_callee_fndecl (exp));
11852 /* Emit warning if a free is called with address of a variable. */
/* Warn about EXP, a call to free, whose argument is the address of a
   declared (non-heap) object, e.g. free(&local).  */
11855 maybe_emit_free_warning (tree exp)
11857 tree arg = CALL_EXPR_ARG (exp, 0);
/* Only &object arguments can be diagnosed statically.  */
11860 if (TREE_CODE (arg) != ADDR_EXPR)
11863 arg = get_base_address (TREE_OPERAND (arg, 0));
11864 if (arg == NULL || INDIRECT_REF_P (arg))
/* Name the offending declaration when we have one.  */
11867 if (SSA_VAR_P (arg))
11868 warning_at (tree_nonartificial_location (exp),
11869 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11871 warning_at (tree_nonartificial_location (exp),
11872 0, "%Kattempt to free a non-heap object", exp);
11875 /* Fold a call to __builtin_object_size with arguments PTR and OST,
/* Fold __builtin_object_size (PTR, OST) to a size_t constant when the
   object size can be computed, otherwise return NULL_TREE to defer.  */
11879 fold_builtin_object_size (tree ptr, tree ost)
11881 tree ret = NULL_TREE;
11882 int object_size_type;
11884 if (!validate_arg (ptr, POINTER_TYPE)
11885 || !validate_arg (ost, INTEGER_TYPE))
/* OST must be a constant in [0, 3]; otherwise leave the call alone so
   the expander can diagnose it.  */
11890 if (TREE_CODE (ost) != INTEGER_CST
11891 || tree_int_cst_sgn (ost) < 0
11892 || compare_tree_int (ost, 3) > 0)
11895 object_size_type = tree_low_cst (ost, 0);
11897 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11898 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11899 and (size_t) 0 for types 2 and 3. */
11900 if (TREE_SIDE_EFFECTS (ptr))
11901 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11903 if (TREE_CODE (ptr) == ADDR_EXPR)
11904 ret = build_int_cstu (size_type_node,
11905 compute_builtin_object_size (ptr, object_size_type));
11907 else if (TREE_CODE (ptr) == SSA_NAME)
11909 unsigned HOST_WIDE_INT bytes;
11911 /* If object size is not known yet, delay folding until
11912 later. Maybe subsequent passes will help determining
11914 bytes = compute_builtin_object_size (ptr, object_size_type);
/* The "unknown" sentinel is -1 for types 0/1 and 0 for 2/3; only fold
   when we got a real answer.  */
11915 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11917 ret = build_int_cstu (size_type_node, bytes);
/* Verify the constant fits size_t before returning it.  */
11922 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11923 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11924 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11931 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11932 DEST, SRC, LEN, and SIZE are the arguments to the call.
11933 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11934 code of the builtin. If MAXLEN is not NULL, it is maximum length
11935 passed as third argument. */
/* Fold a __mem{cpy,pcpy,move,set}_chk call into the unchecked variant
   when LEN (or MAXLEN, an upper bound on LEN) is provably <= SIZE.
   DEST/SRC/LEN/SIZE are the call's arguments; IGNORE is true when the
   return value is unused; FCODE identifies the builtin.  */
11938 fold_builtin_memory_chk (location_t loc, tree fndecl,
11939 tree dest, tree src, tree len, tree size,
11940 tree maxlen, bool ignore,
11941 enum built_in_function fcode)
/* memset_chk's second argument is the fill byte, not a pointer.  */
11945 if (!validate_arg (dest, POINTER_TYPE)
11946 || !validate_arg (src,
11947 (fcode == BUILT_IN_MEMSET_CHK
11948 ? INTEGER_TYPE : POINTER_TYPE))
11949 || !validate_arg (len, INTEGER_TYPE)
11950 || !validate_arg (size, INTEGER_TYPE))
11953 /* If SRC and DEST are the same (and not volatile), return DEST
11954 (resp. DEST+LEN for __mempcpy_chk). */
11955 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11957 if (fcode != BUILT_IN_MEMPCPY_CHK)
11958 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
11962 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
11964 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
/* Can't reason about a non-constant SIZE.  */
11968 if (! host_integerp (size, 1))
/* SIZE == (size_t)-1 means the object size is unknown: the runtime
   check is a no-op and can always be dropped.  */
11971 if (! integer_all_onesp (size))
11973 if (! host_integerp (len, 1))
11975 /* If LEN is not constant, try MAXLEN too.
11976 For MAXLEN only allow optimizing into non-_ocs function
11977 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11978 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11980 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11982 /* (void) __mempcpy_chk () can be optimized into
11983 (void) __memcpy_chk (). */
11984 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11988 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
/* Bound exceeds the object size: keep the checked call.  */
11996 if (tree_int_cst_lt (size, maxlen))
12001 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12002 mem{cpy,pcpy,move,set} is available. */
12005 case BUILT_IN_MEMCPY_CHK:
12006 fn = built_in_decls[BUILT_IN_MEMCPY];
12008 case BUILT_IN_MEMPCPY_CHK:
12009 fn = built_in_decls[BUILT_IN_MEMPCPY];
12011 case BUILT_IN_MEMMOVE_CHK:
12012 fn = built_in_decls[BUILT_IN_MEMMOVE];
12014 case BUILT_IN_MEMSET_CHK:
12015 fn = built_in_decls[BUILT_IN_MEMSET];
12024 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12027 /* Fold a call to the __st[rp]cpy_chk builtin.
12028 DEST, SRC, and SIZE are the arguments to the call.
12029 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12030 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12031 strings passed as second argument. */
/* Fold a __str[p]cpy_chk call into st[rp]cpy (or __memcpy_chk) when the
   source length is provably bounded by SIZE.  MAXLEN, if non-NULL, is a
   known upper bound on strlen (SRC).  */
12034 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12035 tree src, tree size,
12036 tree maxlen, bool ignore,
12037 enum built_in_function fcode)
12041 if (!validate_arg (dest, POINTER_TYPE)
12042 || !validate_arg (src, POINTER_TYPE)
12043 || !validate_arg (size, INTEGER_TYPE))
12046 /* If SRC and DEST are the same (and not volatile), return DEST. */
12047 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12048 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12050 if (! host_integerp (size, 1))
/* SIZE == (size_t)-1 means unknown object size: skip the analysis and
   go straight to the unchecked call.  */
12053 if (! integer_all_onesp (size))
12055 len = c_strlen (src, 1);
12056 if (! len || ! host_integerp (len, 1))
12058 /* If LEN is not constant, try MAXLEN too.
12059 For MAXLEN only allow optimizing into non-_ocs function
12060 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12061 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12063 if (fcode == BUILT_IN_STPCPY_CHK)
12068 /* If return value of __stpcpy_chk is ignored,
12069 optimize into __strcpy_chk. */
12070 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12074 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12077 if (! len || TREE_SIDE_EFFECTS (len))
12080 /* If c_strlen returned something, but not a constant,
12081 transform __strcpy_chk into __memcpy_chk. */
12082 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
/* Copy strlen (SRC) + 1 bytes to include the terminating NUL.  */
12086 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12087 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12088 build_call_expr_loc (loc, fn, 4,
12089 dest, src, len, size));
/* Bound may exceed the object: keep the checked variant.  */
12095 if (! tree_int_cst_lt (maxlen, size))
12099 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12100 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12101 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12105 return build_call_expr_loc (loc, fn, 2, dest, src);
12108 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12109 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12110 length passed as third argument. */
/* Fold a __strncpy_chk call into strncpy when LEN (or its upper bound
   MAXLEN) is provably <= SIZE.  */
12113 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12114 tree len, tree size, tree maxlen)
12118 if (!validate_arg (dest, POINTER_TYPE)
12119 || !validate_arg (src, POINTER_TYPE)
12120 || !validate_arg (len, INTEGER_TYPE)
12121 || !validate_arg (size, INTEGER_TYPE))
12124 if (! host_integerp (size, 1))
/* SIZE == (size_t)-1 means the check is a no-op.  */
12127 if (! integer_all_onesp (size))
12129 if (! host_integerp (len, 1))
12131 /* If LEN is not constant, try MAXLEN too.
12132 For MAXLEN only allow optimizing into non-_ocs function
12133 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12134 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
/* Bound exceeds object size: keep the checked call.  */
12140 if (tree_int_cst_lt (size, maxlen))
12144 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12145 fn = built_in_decls[BUILT_IN_STRNCPY];
12149 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12152 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12153 are the arguments to the call. */
/* Fold a __strcat_chk call into strcat.  The transformation is only
   done when SIZE is (size_t)-1, i.e. the object size is unknown and the
   runtime check would be a no-op anyway.  */
12156 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12157 tree src, tree size)
12162 if (!validate_arg (dest, POINTER_TYPE)
12163 || !validate_arg (src, POINTER_TYPE)
12164 || !validate_arg (size, INTEGER_TYPE))
12167 p = c_getstr (src);
12168 /* If the SRC parameter is "", return DEST. */
12169 if (p && *p == '\0')
12170 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
/* Any finite SIZE requires the runtime check: keep the _chk call.  */
12172 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12175 /* If __builtin_strcat_chk is used, assume strcat is available. */
12176 fn = built_in_decls[BUILT_IN_STRCAT];
12180 return build_call_expr_loc (loc, fn, 2, dest, src);
12183 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
/* Fold a __strncat_chk call FNDECL with arguments DEST, SRC, LEN and
   SIZE into strncat (SIZE unknown), __strcat_chk (LEN >= strlen (SRC)),
   or just DEST for trivial cases.

   Fix: the argument-validation chain checked SIZE twice and never
   validated LEN; the first of the duplicated checks now validates LEN.  */
12187 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12188 tree dest, tree src, tree len, tree size)
12193 if (!validate_arg (dest, POINTER_TYPE)
12194 || !validate_arg (src, POINTER_TYPE)
12195 || !validate_arg (len, INTEGER_TYPE)
12196 || !validate_arg (size, INTEGER_TYPE))
12199 p = c_getstr (src);
12200 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12201 if (p && *p == '\0')
12202 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12203 else if (integer_zerop (len))
12204 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12206 if (! host_integerp (size, 1))
/* SIZE == (size_t)-1 means unknown object size: fall through to the
   unchecked strncat.  */
12209 if (! integer_all_onesp (size))
12211 tree src_len = c_strlen (src, 1);
12213 && host_integerp (src_len, 1)
12214 && host_integerp (len, 1)
12215 && ! tree_int_cst_lt (len, src_len))
12217 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12218 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12222 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12227 /* If __builtin_strncat_chk is used, assume strncat is available. */
12228 fn = built_in_decls[BUILT_IN_STRNCAT];
12232 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12235 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12236 a normal call should be emitted rather than expanding the function
12237 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
/* Fold a __{,v}sprintf_chk call EXP into {,v}sprintf when the output
   length is provably < SIZE, or when FLAG is 0 and the format is
   % -free or exactly "%s".  Return NULL_TREE to keep the checked call.  */
12240 fold_builtin_sprintf_chk (location_t loc, tree exp,
12241 enum built_in_function fcode)
12243 tree dest, size, len, fn, fmt, flag;
12244 const char *fmt_str;
12245 int nargs = call_expr_nargs (exp);
12247 /* Verify the required arguments in the original call. */
12250 dest = CALL_EXPR_ARG (exp, 0);
12251 if (!validate_arg (dest, POINTER_TYPE))
12253 flag = CALL_EXPR_ARG (exp, 1);
12254 if (!validate_arg (flag, INTEGER_TYPE))
12256 size = CALL_EXPR_ARG (exp, 2);
12257 if (!validate_arg (size, INTEGER_TYPE))
12259 fmt = CALL_EXPR_ARG (exp, 3);
12260 if (!validate_arg (fmt, POINTER_TYPE))
12263 if (! host_integerp (size, 1))
12268 if (!init_target_chars ())
12271 /* Check whether the format is a literal string constant. */
12272 fmt_str = c_getstr (fmt);
12273 if (fmt_str != NULL)
12275 /* If the format doesn't contain % args or %%, we know the size. */
12276 if (strchr (fmt_str, target_percent) == 0)
12278 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12279 len = build_int_cstu (size_type_node, strlen (fmt_str));
12281 /* If the format is "%s" and first ... argument is a string literal,
12282 we know the size too. */
12283 else if (fcode == BUILT_IN_SPRINTF_CHK
12284 && strcmp (fmt_str, target_percent_s) == 0)
12290 arg = CALL_EXPR_ARG (exp, 4);
12291 if (validate_arg (arg, POINTER_TYPE))
12293 len = c_strlen (arg, 1);
12294 if (! len || ! host_integerp (len, 1))
/* LEN excludes the NUL, so require strictly LEN < SIZE.  */
12301 if (! integer_all_onesp (size))
12303 if (! len || ! tree_int_cst_lt (len, size))
12307 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12308 or if format doesn't contain % chars or is "%s". */
12309 if (! integer_zerop (flag))
12311 if (fmt_str == NULL)
12313 if (strchr (fmt_str, target_percent) != NULL
12314 && strcmp (fmt_str, target_percent_s))
12318 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12319 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12320 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
/* Rebuild the call dropping FLAG and SIZE (first 4 args -> dest, fmt
   plus the trailing varargs).  */
12324 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12327 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12328 a normal call should be emitted rather than expanding the function
12329 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12330 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12331 passed as second argument. */
/* Fold a __{,v}snprintf_chk call EXP into {,v}snprintf when LEN (or its
   upper bound MAXLEN) is provably <= SIZE and the FLAG/format
   conditions allow it.  Return NULL_TREE to keep the checked call.  */
12334 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12335 enum built_in_function fcode)
12337 tree dest, size, len, fn, fmt, flag;
12338 const char *fmt_str;
12340 /* Verify the required arguments in the original call. */
12341 if (call_expr_nargs (exp) < 5)
12343 dest = CALL_EXPR_ARG (exp, 0);
12344 if (!validate_arg (dest, POINTER_TYPE))
12346 len = CALL_EXPR_ARG (exp, 1);
12347 if (!validate_arg (len, INTEGER_TYPE))
12349 flag = CALL_EXPR_ARG (exp, 2);
12350 if (!validate_arg (flag, INTEGER_TYPE))
12352 size = CALL_EXPR_ARG (exp, 3);
12353 if (!validate_arg (size, INTEGER_TYPE))
12355 fmt = CALL_EXPR_ARG (exp, 4);
12356 if (!validate_arg (fmt, POINTER_TYPE))
12359 if (! host_integerp (size, 1))
/* SIZE == (size_t)-1 means the check is a no-op.  */
12362 if (! integer_all_onesp (size))
12364 if (! host_integerp (len, 1))
12366 /* If LEN is not constant, try MAXLEN too.
12367 For MAXLEN only allow optimizing into non-_ocs function
12368 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12369 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
/* Bound exceeds the object: keep the checked call.  */
12375 if (tree_int_cst_lt (size, maxlen))
12379 if (!init_target_chars ())
12382 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12383 or if format doesn't contain % chars or is "%s". */
12384 if (! integer_zerop (flag))
12386 fmt_str = c_getstr (fmt);
12387 if (fmt_str == NULL)
12389 if (strchr (fmt_str, target_percent) != NULL
12390 && strcmp (fmt_str, target_percent_s))
12394 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12396 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12397 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
/* Rebuild the call dropping FLAG and SIZE (dest, len, fmt + varargs).  */
12401 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12404 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12405 FMT and ARG are the arguments to the call; we don't fold cases with
12406 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12408 Return NULL_TREE if no simplification was possible, otherwise return the
12409 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12410 code of the function to be simplified. */
/* Fold a {,v}printf{,_unlocked}/__{,v}printf_chk call with literal
   format FMT (and optional ARG) into putchar/puts when the result is
   ignored.  FNDECL is the called builtin; FCODE its BUILT_IN_* code.  */
12413 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12414 tree arg, bool ignore,
12415 enum built_in_function fcode)
12417 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12418 const char *fmt_str = NULL;
12420 /* If the return value is used, don't do the transformation. */
12424 /* Verify the required arguments in the original call. */
12425 if (!validate_arg (fmt, POINTER_TYPE))
12428 /* Check whether the format is a literal string constant. */
12429 fmt_str = c_getstr (fmt);
12430 if (fmt_str == NULL)
12433 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12435 /* If we're using an unlocked function, assume the other
12436 unlocked functions exist explicitly. */
12437 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12438 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12442 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12443 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12446 if (!init_target_chars ())
12449 if (strcmp (fmt_str, target_percent_s) == 0
12450 || strchr (fmt_str, target_percent) == NULL)
12454 if (strcmp (fmt_str, target_percent_s) == 0)
/* "%s" with a va_list variant can't be analyzed — bail out.  */
12456 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12459 if (!arg || !validate_arg (arg, POINTER_TYPE))
12462 str = c_getstr (arg);
12468 /* The format specifier doesn't contain any '%' characters. */
12469 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12475 /* If the string was "", printf does nothing. */
12476 if (str[0] == '\0')
12477 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12479 /* If the string has length of 1, call putchar. */
12480 if (str[1] == '\0')
12482 /* Given printf("c"), (where c is any one character,)
12483 convert "c"[0] to an int and pass that to the replacement
12485 newarg = build_int_cst (NULL_TREE, str[0]);
12487 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12491 /* If the string was "string\n", call puts("string"). */
12492 size_t len = strlen (str);
12493 if ((unsigned char)str[len - 1] == target_newline)
12495 /* Create a NUL-terminated string that's one char shorter
12496 than the original, stripping off the trailing '\n'. */
12497 char *newstr = XALLOCAVEC (char, len);
12498 memcpy (newstr, str, len - 1);
12499 newstr[len - 1] = 0;
/* puts appends the newline we just stripped, so output matches.  */
12501 newarg = build_string_literal (len, newstr);
12503 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12506 /* We'd like to arrange to call fputs(string,stdout) here,
12507 but we need stdout and don't have a way to get it yet. */
12512 /* The other optimizations can be done only on the non-va_list variants. */
12513 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12516 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12517 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12519 if (!arg || !validate_arg (arg, POINTER_TYPE))
12522 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12525 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12526 else if (strcmp (fmt_str, target_percent_c) == 0)
12528 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12531 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
/* Coerce the replacement's result to the original return type.  */
12537 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12540 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12541 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12542 more than 3 arguments, and ARG may be null in the 2-argument case.
12544 Return NULL_TREE if no simplification was possible, otherwise return the
12545 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12546 code of the function to be simplified. */
/* Fold a {,v}fprintf{,_unlocked}/__{,v}fprintf_chk call on stream FP
   with literal format FMT (and optional ARG) into fputc/fputs when the
   result is ignored.  FNDECL is the called builtin; FCODE its code.  */
12549 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12550 tree fmt, tree arg, bool ignore,
12551 enum built_in_function fcode)
12553 tree fn_fputc, fn_fputs, call = NULL_TREE;
12554 const char *fmt_str = NULL;
12556 /* If the return value is used, don't do the transformation. */
12560 /* Verify the required arguments in the original call. */
12561 if (!validate_arg (fp, POINTER_TYPE))
12563 if (!validate_arg (fmt, POINTER_TYPE))
12566 /* Check whether the format is a literal string constant. */
12567 fmt_str = c_getstr (fmt);
12568 if (fmt_str == NULL)
12571 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12573 /* If we're using an unlocked function, assume the other
12574 unlocked functions exist explicitly. */
12575 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12576 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12580 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12581 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12584 if (!init_target_chars ())
12587 /* If the format doesn't contain % args or %%, use strcpy. */
12588 if (strchr (fmt_str, target_percent) == NULL)
12590 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12594 /* If the format specifier was "", fprintf does nothing. */
12595 if (fmt_str[0] == '\0')
12597 /* If FP has side-effects, just wait until gimplification is
12599 if (TREE_SIDE_EFFECTS (fp))
12602 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12605 /* When "string" doesn't contain %, replace all cases of
12606 fprintf (fp, string) with fputs (string, fp). The fputs
12607 builtin will take care of special cases like length == 1. */
12609 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12612 /* The other optimizations can be done only on the non-va_list variants. */
12613 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12616 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12617 else if (strcmp (fmt_str, target_percent_s) == 0)
12619 if (!arg || !validate_arg (arg, POINTER_TYPE))
12622 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12625 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12626 else if (strcmp (fmt_str, target_percent_c) == 0)
12628 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12631 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
/* Coerce the replacement's result to the original return type.  */
12636 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12639 /* Initialize format string characters in the target charset. */
/* Lazily translate the format-string characters '\n', '%', 'c' and 's'
   into the target character set and build the "%c", "%s" and "%s\n"
   comparison strings used by the printf folders.  Fails (presumably
   returning false — elided here) if any character has no target
   equivalent.  */
12642 init_target_chars (void)
12647 target_newline = lang_hooks.to_target_charset ('\n');
12648 target_percent = lang_hooks.to_target_charset ('%');
12649 target_c = lang_hooks.to_target_charset ('c');
12650 target_s = lang_hooks.to_target_charset ('s');
/* A zero result means the host character has no target-charset
   counterpart.  */
12651 if (target_newline == 0 || target_percent == 0 || target_c == 0
12655 target_percent_c[0] = target_percent;
12656 target_percent_c[1] = target_c;
12657 target_percent_c[2] = '\0';
12659 target_percent_s[0] = target_percent;
12660 target_percent_s[1] = target_s;
12661 target_percent_s[2] = '\0';
12663 target_percent_s_newline[0] = target_percent;
12664 target_percent_s_newline[1] = target_s;
12665 target_percent_s_newline[2] = target_newline;
12666 target_percent_s_newline[3] = '\0';
12673 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12674 and no overflow/underflow occurred. INEXACT is true if M was not
12675 exactly calculated. TYPE is the tree type for the result. This
12676 function assumes that you cleared the MPFR flags and then
12677 calculated M to see if anything subsequently set a flag prior to
12678 entering this function. Return NULL_TREE if any checks fail. */
12681 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12683 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12684 overflow/underflow occurred. If -frounding-math, proceed iff the
12685 result of calling FUNC was exact. */
12686 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12687 && (!flag_rounding_math || !inexact))
12689 REAL_VALUE_TYPE rr;
12691 real_from_mpfr (&rr, m, type, GMP_RNDN);
12692 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12693 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12694 but the mpft_t is not, then we underflowed in the
12696 if (real_isfinite (&rr)
12697 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12699 REAL_VALUE_TYPE rmode;
/* Round-trip through TYPE's machine mode; fold only when the value
   survives the conversion exactly.  */
12701 real_convert (&rmode, TYPE_MODE (type), &rr);
12702 /* Proceed iff the specified mode can hold the value. */
12703 if (real_identical (&rmode, &rr))
12704 return build_real (type, rmode);
12710 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12711 number and no overflow/underflow occurred. INEXACT is true if M
12712 was not exactly calculated. TYPE is the tree type for the result.
12713 This function assumes that you cleared the MPFR flags and then
12714 calculated M to see if anything subsequently set a flag prior to
12715 entering this function. Return NULL_TREE if any checks fail, if
12716 FORCE_CONVERT is true, then bypass the checks. */
12719 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12721 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12722 overflow/underflow occurred. If -frounding-math, proceed iff the
12723 result of calling FUNC was exact. */
/* NOTE(review): the 'if (force_convert' head of this condition is
   missing from the truncated excerpt. */
12725 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12726 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12727 && (!flag_rounding_math || !inexact)))
12729 REAL_VALUE_TYPE re, im;
12731 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12732 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12733 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12734 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12735 but the mpfr_t is not, then we underflowed in the conversion. */
/* NOTE(review): another 'if (force_convert' head is missing here. */
12738 || (real_isfinite (&re) && real_isfinite (&im)
12739 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12740 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12742 REAL_VALUE_TYPE re_mode, im_mode;
12744 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12745 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12746 /* Proceed iff the specified mode can hold the value. */
/* NOTE(review): 'if (force_convert' head missing before this too. */
12748 || (real_identical (&re_mode, &re)
12749 && real_identical (&im_mode, &im)))
12750 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12751 build_real (TREE_TYPE (type), im_mode));
12757 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12758 FUNC on it and return the resulting value as a tree with type TYPE.
12759 If MIN and/or MAX are not NULL, then the supplied ARG must be
12760 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12761 acceptable values, otherwise they are not. The mpfr precision is
12762 set to the precision of TYPE. We assume that function FUNC returns
12763 zero if the result could be calculated exactly within the requested
/* NOTE(review): truncated -- the trailing 'bool inclusive)' parameter,
   the 'int inexact; mpfr_t m;' locals, mpfr_clear and the final
   'return result;' are not visible here. */
12767 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12768 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12771 tree result = NULL_TREE;
12775 /* To proceed, MPFR must exactly represent the target floating point
12776 format, which only happens when the target base equals two. */
12777 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12778 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12780 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
/* Reject NaN/Inf and any value outside the caller's domain bounds;
   INCLUSIVE selects >= / <= instead of > / <. */
12782 if (real_isfinite (ra)
12783 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12784 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12786 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12787 const int prec = fmt->p;
/* Match MPFR's rounding to the target format's rounding behavior. */
12788 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12792 mpfr_init2 (m, prec);
12793 mpfr_from_real (m, ra, GMP_RNDN);
12794 mpfr_clear_flags ();
12795 inexact = func (m, m, rnd);
12796 result = do_mpfr_ckconv (m, type, inexact);
12804 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12805 FUNC on it and return the resulting value as a tree with type TYPE.
12806 The mpfr precision is set to the precision of TYPE. We assume that
12807 function FUNC returns zero if the result could be calculated
12808 exactly within the requested precision. */
/* NOTE(review): truncated -- 'static tree', braces, the
   'int inexact; mpfr_t m1, m2;' locals and 'return result;' are
   missing from this excerpt. */
12811 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12812 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12814 tree result = NULL_TREE;
12819 /* To proceed, MPFR must exactly represent the target floating point
12820 format, which only happens when the target base equals two. */
12821 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12822 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12823 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12825 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12826 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
/* Only fold when both operands are finite (no NaN/Inf). */
12828 if (real_isfinite (ra1) && real_isfinite (ra2))
12830 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12831 const int prec = fmt->p;
12832 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12836 mpfr_inits2 (prec, m1, m2, NULL);
12837 mpfr_from_real (m1, ra1, GMP_RNDN);
12838 mpfr_from_real (m2, ra2, GMP_RNDN);
/* Clear flags so do_mpfr_ckconv can detect overflow/underflow set
   solely by FUNC. */
12839 mpfr_clear_flags ();
12840 inexact = func (m1, m1, m2, rnd);
12841 result = do_mpfr_ckconv (m1, type, inexact);
12842 mpfr_clears (m1, m2, NULL);
12849 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12850 FUNC on it and return the resulting value as a tree with type TYPE.
12851 The mpfr precision is set to the precision of TYPE. We assume that
12852 function FUNC returns zero if the result could be calculated
12853 exactly within the requested precision. */
/* NOTE(review): truncated -- 'static tree', braces, the
   'int inexact; mpfr_t m1, m2, m3;' locals and 'return result;' are
   missing from this excerpt. */
12856 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12857 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12859 tree result = NULL_TREE;
12865 /* To proceed, MPFR must exactly represent the target floating point
12866 format, which only happens when the target base equals two. */
12867 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12868 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12869 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12870 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12872 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12873 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12874 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
/* Only fold when all three operands are finite (no NaN/Inf). */
12876 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12878 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12879 const int prec = fmt->p;
12880 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12884 mpfr_inits2 (prec, m1, m2, m3, NULL);
12885 mpfr_from_real (m1, ra1, GMP_RNDN);
12886 mpfr_from_real (m2, ra2, GMP_RNDN);
12887 mpfr_from_real (m3, ra3, GMP_RNDN);
12888 mpfr_clear_flags ();
12889 inexact = func (m1, m1, m2, m3, rnd);
12890 result = do_mpfr_ckconv (m1, type, inexact);
12891 mpfr_clears (m1, m2, m3, NULL);
12898 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12899 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12900 If ARG_SINP and ARG_COSP are NULL then the result is returned
12901 as a complex value.
12902 The type is taken from the type of ARG and is used for setting the
12903 precision of the calculation and results. */
/* NOTE(review): truncated -- 'static tree', braces, the
   'int inexact; mpfr_t m, ms, mc;' locals and 'return result;' are
   missing from this excerpt. */
12906 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12908 tree const type = TREE_TYPE (arg);
12909 tree result = NULL_TREE;
12913 /* To proceed, MPFR must exactly represent the target floating point
12914 format, which only happens when the target base equals two. */
12915 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12916 && TREE_CODE (arg) == REAL_CST
12917 && !TREE_OVERFLOW (arg))
12919 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12921 if (real_isfinite (ra))
12923 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12924 const int prec = fmt->p;
12925 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12926 tree result_s, result_c;
12930 mpfr_inits2 (prec, m, ms, mc, NULL);
12931 mpfr_from_real (m, ra, GMP_RNDN);
12932 mpfr_clear_flags ();
/* Compute sine and cosine simultaneously into MS and MC. */
12933 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12934 result_s = do_mpfr_ckconv (ms, type, inexact);
12935 result_c = do_mpfr_ckconv (mc, type, inexact);
12936 mpfr_clears (m, ms, mc, NULL);
12937 if (result_s && result_c)
12939 /* If we are to return in a complex value do so. */
12940 if (!arg_sinp && !arg_cosp)
/* cexpi-style result: real part is cos, imaginary part is sin. */
12941 return build_complex (build_complex_type (type),
12942 result_c, result_s);
12944 /* Dereference the sin/cos pointer arguments. */
12945 arg_sinp = build_fold_indirect_ref (arg_sinp);
12946 arg_cosp = build_fold_indirect_ref (arg_cosp);
12947 /* Proceed if valid pointer type were passed in. */
12948 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12949 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12951 /* Set the values. */
/* NOTE(review): the last operand of each fold_build2 call below
   (the computed sin/cos value) is on a line missing from this
   truncated excerpt. */
12952 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12954 TREE_SIDE_EFFECTS (result_s) = 1;
12955 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12957 TREE_SIDE_EFFECTS (result_c) = 1;
12958 /* Combine the assignments into a compound expr. */
12959 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12960 result_s, result_c));
12968 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12969 two-argument mpfr order N Bessel function FUNC on them and return
12970 the resulting value as a tree with type TYPE. The mpfr precision
12971 is set to the precision of TYPE. We assume that function FUNC
12972 returns zero if the result could be calculated exactly within the
12973 requested precision. */
12975 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12976 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12977 const REAL_VALUE_TYPE *min, bool inclusive)
12979 tree result = NULL_TREE;
12984 /* To proceed, MPFR must exactly represent the target floating point
12985 format, which only happens when the target base equals two. */
12986 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12987 && host_integerp (arg1, 0)
12988 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
/* N is the Bessel order, passed through to FUNC as a host long. */
12990 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12991 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
/* NOTE(review): the head of this condition (presumably a check that
   N fits in a long) is missing from the truncated excerpt. */
12994 && real_isfinite (ra)
12995 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12997 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12998 const int prec = fmt->p;
12999 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13003 mpfr_init2 (m, prec);
13004 mpfr_from_real (m, ra, GMP_RNDN);
13005 mpfr_clear_flags ();
13006 inexact = func (m, n, m, rnd);
13007 result = do_mpfr_ckconv (m, type, inexact);
13015 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13016 the pointer *(ARG_QUO) and return the result. The type is taken
13017 from the type of ARG0 and is used for setting the precision of the
13018 calculation and results. */
/* NOTE(review): truncated -- 'static tree', braces, and the locals
   ('long integer_quo; mpfr_t m0, m1; tree result_rem;') plus the head
   'if (result_rem)' before the quo handling are missing here. */
13021 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13023 tree const type = TREE_TYPE (arg0);
13024 tree result = NULL_TREE;
13029 /* To proceed, MPFR must exactly represent the target floating point
13030 format, which only happens when the target base equals two. */
13031 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13032 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13033 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13035 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13036 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13038 if (real_isfinite (ra0) && real_isfinite (ra1))
13040 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13041 const int prec = fmt->p;
13042 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13047 mpfr_inits2 (prec, m0, m1, NULL);
13048 mpfr_from_real (m0, ra0, GMP_RNDN);
13049 mpfr_from_real (m1, ra1, GMP_RNDN);
13050 mpfr_clear_flags ();
/* MPFR writes the quotient bits into INTEGER_QUO and the remainder
   into M0 (in place). */
13051 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13052 /* Remquo is independent of the rounding mode, so pass
13053 inexact=0 to do_mpfr_ckconv(). */
13054 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13055 mpfr_clears (m0, m1, NULL);
13058 /* MPFR calculates quo in the host's long so it may
13059 return more bits in quo than the target int can hold
13060 if sizeof(host long) > sizeof(target int). This can
13061 happen even for native compilers in LP64 mode. In
13062 these cases, modulo the quo value with the largest
13063 number that the target int can hold while leaving one
13064 bit for the sign. */
13065 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13066 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13068 /* Dereference the quo pointer argument. */
13069 arg_quo = build_fold_indirect_ref (arg_quo);
13070 /* Proceed iff a valid pointer type was passed in. */
13071 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13073 /* Set the value. */
13074 tree result_quo = fold_build2 (MODIFY_EXPR,
13075 TREE_TYPE (arg_quo), arg_quo,
13076 build_int_cst (NULL, integer_quo));
13077 TREE_SIDE_EFFECTS (result_quo) = 1;
13078 /* Combine the quo assignment with the rem. */
13079 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13080 result_quo, result_rem));
13088 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13089 resulting value as a tree with type TYPE. The mpfr precision is
13090 set to the precision of TYPE. We assume that this mpfr function
13091 returns zero if the result could be calculated exactly within the
13092 requested precision. In addition, the integer pointer represented
13093 by ARG_SG will be dereferenced and set to the appropriate signgam
/* NOTE(review): the end of this comment and the 'static tree' return
   type are missing from the truncated excerpt, as are the locals
   ('int sg, inexact; mpfr_t m; tree result_lg, result_sg;'). */
13097 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13099 tree result = NULL_TREE;
13103 /* To proceed, MPFR must exactly represent the target floating point
13104 format, which only happens when the target base equals two. Also
13105 verify ARG is a constant and that ARG_SG is an int pointer. */
13106 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13107 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13108 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13109 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13111 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13113 /* In addition to NaN and Inf, the argument cannot be zero or a
13114 negative integer. */
13115 if (real_isfinite (ra)
13116 && ra->cl != rvc_zero
13117 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13119 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13120 const int prec = fmt->p;
13121 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13126 mpfr_init2 (m, prec);
13127 mpfr_from_real (m, ra, GMP_RNDN);
13128 mpfr_clear_flags ();
/* SG receives the sign of gamma(ARG); mirrors C library signgam. */
13129 inexact = mpfr_lgamma (m, &sg, m, rnd);
13130 result_lg = do_mpfr_ckconv (m, type, inexact);
13136 /* Dereference the arg_sg pointer argument. */
13137 arg_sg = build_fold_indirect_ref (arg_sg);
13138 /* Assign the signgam value into *arg_sg. */
13139 result_sg = fold_build2 (MODIFY_EXPR,
13140 TREE_TYPE (arg_sg), arg_sg,
13141 build_int_cst (NULL, sg));
13142 TREE_SIDE_EFFECTS (result_sg) = 1;
13143 /* Combine the signgam assignment with the lgamma result. */
13144 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13145 result_sg, result_lg));
13153 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13154 function FUNC on it and return the resulting value as a tree with
13155 type TYPE. The mpfr precision is set to the precision of TYPE. We
13156 assume that function FUNC returns zero if the result could be
13157 calculated exactly within the requested precision. */
/* NOTE(review): truncated -- 'static tree', braces, the
   'int inexact; mpc_t m;' locals, mpc_clear and 'return result;' are
   missing from this excerpt. */
13160 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13162 tree result = NULL_TREE;
13166 /* To proceed, MPFR must exactly represent the target floating point
13167 format, which only happens when the target base equals two. */
13168 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13169 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13170 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13172 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13173 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13175 if (real_isfinite (re) && real_isfinite (im))
13177 const struct real_format *const fmt =
13178 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13179 const int prec = fmt->p;
/* Separate rounding modes: RND for the mpfr conversions, CRND for
   the complex mpc operation (same mode in both components). */
13180 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13181 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13185 mpc_init2 (m, prec);
13186 mpfr_from_real (mpc_realref(m), re, rnd);
13187 mpfr_from_real (mpc_imagref(m), im, rnd);
13188 mpfr_clear_flags ();
13189 inexact = func (m, m, crnd);
13190 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13198 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13199 mpc function FUNC on it and return the resulting value as a tree
13200 with type TYPE. The mpfr precision is set to the precision of
13201 TYPE. We assume that function FUNC returns zero if the result
13202 could be calculated exactly within the requested precision. If
13203 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13204 in the arguments and/or results. */
/* NOTE(review): truncated -- 'static tree', braces, the
   'int inexact; mpc_t m0, m1;' locals, mpc_clear calls and
   'return result;' are missing from this excerpt. */
13207 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13208 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13210 tree result = NULL_TREE;
13215 /* To proceed, MPFR must exactly represent the target floating point
13216 format, which only happens when the target base equals two. */
13217 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13218 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13219 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13220 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13221 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13223 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13224 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13225 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13226 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
/* NOTE(review): the 'if (do_nonfinite' head of this condition is
   missing from the truncated excerpt. */
13229 || (real_isfinite (re0) && real_isfinite (im0)
13230 && real_isfinite (re1) && real_isfinite (im1)))
13232 const struct real_format *const fmt =
13233 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13234 const int prec = fmt->p;
13235 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13236 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13240 mpc_init2 (m0, prec);
13241 mpc_init2 (m1, prec);
13242 mpfr_from_real (mpc_realref(m0), re0, rnd);
13243 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13244 mpfr_from_real (mpc_realref(m1), re1, rnd);
13245 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13246 mpfr_clear_flags ();
13247 inexact = func (m0, m0, m1, crnd);
/* DO_NONFINITE doubles as force_convert: skip the finiteness checks
   in do_mpc_ckconv when folding Inf/NaN results is allowed. */
13248 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
/* NOTE(review): the opening of this comment is missing from the
   truncated excerpt. */
13258 The functions below provide an alternate interface for folding
13259 builtin function calls presented as GIMPLE_CALL statements rather
13260 than as CALL_EXPRs. The folded result is still expressed as a
13261 tree. There is too much code duplication in the handling of
13262 varargs functions, and a more intrusive re-factoring would permit
13263 better sharing of code between the tree and statement-based
13264 versions of these functions. */
13266 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13267 along with N new arguments specified as the "..." parameters. SKIP
13268 is the number of arguments in STMT to be omitted. This function is used
13269 to do varargs-to-varargs transformations. */
/* NOTE(review): truncated -- 'static tree', braces, the 'va_list ap;',
   'tree *buffer;', loop-index declarations and the va_start/va_end
   calls are missing from this excerpt. */
13272 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13274 int oldnargs = gimple_call_num_args (stmt);
13275 int nargs = oldnargs - skip + n;
13276 tree fntype = TREE_TYPE (fndecl);
13277 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13281 location_t loc = gimple_location (stmt);
/* Stack-allocate the combined argument vector. */
13283 buffer = XALLOCAVEC (tree, nargs);
/* First the N new leading arguments from the "..." list... */
13285 for (i = 0; i < n; i++)
13286 buffer[i] = va_arg (ap, tree);
/* ...then the original call's arguments past the first SKIP. */
13288 for (j = skip; j < oldnargs; j++, i++)
13289 buffer[i] = gimple_call_arg (stmt, j);
13291 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13294 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13295 a normal call should be emitted rather than expanding the function
13296 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
/* NOTE(review): truncated -- 'static tree', braces, the argument-count
   check and the 'return NULL_TREE;' lines after each failed
   validate_arg are missing from this excerpt. */
13299 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13301 tree dest, size, len, fn, fmt, flag;
13302 const char *fmt_str;
13303 int nargs = gimple_call_num_args (stmt);
13305 /* Verify the required arguments in the original call. */
/* __sprintf_chk args: dest, flag, size, fmt, ... */
13308 dest = gimple_call_arg (stmt, 0);
13309 if (!validate_arg (dest, POINTER_TYPE))
13311 flag = gimple_call_arg (stmt, 1);
13312 if (!validate_arg (flag, INTEGER_TYPE))
13314 size = gimple_call_arg (stmt, 2);
13315 if (!validate_arg (size, INTEGER_TYPE))
13317 fmt = gimple_call_arg (stmt, 3);
13318 if (!validate_arg (fmt, POINTER_TYPE))
13321 if (! host_integerp (size, 1))
13326 if (!init_target_chars ())
13329 /* Check whether the format is a literal string constant. */
13330 fmt_str = c_getstr (fmt);
13331 if (fmt_str != NULL)
13333 /* If the format doesn't contain % args or %%, we know the size. */
13334 if (strchr (fmt_str, target_percent) == 0)
13336 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13337 len = build_int_cstu (size_type_node, strlen (fmt_str));
13339 /* If the format is "%s" and first ... argument is a string literal,
13340 we know the size too. */
13341 else if (fcode == BUILT_IN_SPRINTF_CHK
13342 && strcmp (fmt_str, target_percent_s) == 0)
13348 arg = gimple_call_arg (stmt, 4);
13349 if (validate_arg (arg, POINTER_TYPE))
13351 len = c_strlen (arg, 1);
13352 if (! len || ! host_integerp (len, 1))
/* A size of all-ones means the size check was disabled (unknown
   object size); otherwise the known LEN must fit under SIZE. */
13359 if (! integer_all_onesp (size))
13361 if (! len || ! tree_int_cst_lt (len, size))
13365 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13366 or if format doesn't contain % chars or is "%s". */
13367 if (! integer_zerop (flag))
13369 if (fmt_str == NULL)
13371 if (strchr (fmt_str, target_percent) != NULL
13372 && strcmp (fmt_str, target_percent_s))
13376 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13377 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13378 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
/* Drop the flag and size arguments; keep dest and fmt plus the tail. */
13382 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13385 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13386 a normal call should be emitted rather than expanding the function
13387 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13388 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13389 passed as second argument. */
/* NOTE(review): truncated -- 'static tree', braces and the
   'return NULL_TREE;' lines after each failed validate_arg are
   missing from this excerpt. */
13392 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13393 enum built_in_function fcode)
13395 tree dest, size, len, fn, fmt, flag;
13396 const char *fmt_str;
13398 /* Verify the required arguments in the original call. */
13399 if (gimple_call_num_args (stmt) < 5)
/* __snprintf_chk args: dest, len, flag, size, fmt, ... */
13401 dest = gimple_call_arg (stmt, 0);
13402 if (!validate_arg (dest, POINTER_TYPE))
13404 len = gimple_call_arg (stmt, 1);
13405 if (!validate_arg (len, INTEGER_TYPE))
13407 flag = gimple_call_arg (stmt, 2);
13408 if (!validate_arg (flag, INTEGER_TYPE))
13410 size = gimple_call_arg (stmt, 3);
13411 if (!validate_arg (size, INTEGER_TYPE))
13413 fmt = gimple_call_arg (stmt, 4);
13414 if (!validate_arg (fmt, POINTER_TYPE))
13417 if (! host_integerp (size, 1))
/* An all-ones size means the object size is unknown; skip the
   LEN-vs-SIZE comparison in that case. */
13420 if (! integer_all_onesp (size))
13422 if (! host_integerp (len, 1))
13424 /* If LEN is not constant, try MAXLEN too.
13425 For MAXLEN only allow optimizing into non-_ocs function
13426 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13427 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13433 if (tree_int_cst_lt (size, maxlen))
13437 if (!init_target_chars ())
13440 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13441 or if format doesn't contain % chars or is "%s". */
13442 if (! integer_zerop (flag))
13444 fmt_str = c_getstr (fmt);
13445 if (fmt_str == NULL)
13447 if (strchr (fmt_str, target_percent) != NULL
13448 && strcmp (fmt_str, target_percent_s))
13452 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13454 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13455 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
/* Drop flag and size; keep dest, len and fmt plus the tail. */
13459 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13462 /* Builtins with folding operations that operate on "..." arguments
13463 need special handling; we need to store the arguments in a convenient
13464 data structure before attempting any folding. Fortunately there are
13465 only a few builtins that fall into this category. FNDECL is the
13466 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13467 result of the function call is ignored. */
/* NOTE(review): truncated -- 'static tree', braces, the 'switch (fcode)'
   head, 'break;' statements, the default case and the final
   'if (ret)' / 'return ret;' lines are missing from this excerpt. */
13470 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13471 bool ignore ATTRIBUTE_UNUSED)
13473 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13474 tree ret = NULL_TREE;
13478 case BUILT_IN_SPRINTF_CHK:
13479 case BUILT_IN_VSPRINTF_CHK:
13480 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13483 case BUILT_IN_SNPRINTF_CHK:
13484 case BUILT_IN_VSNPRINTF_CHK:
13485 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
/* Wrap in a NOP and suppress warnings for the replaced statement. */
13492 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13493 TREE_NO_WARNING (ret) = 1;
13499 /* A wrapper function for builtin folding that prevents warnings for
13500 "statement without effect" and the like, caused by removing the
13501 call node earlier than the warning is generated. */
/* NOTE(review): truncated -- 'tree', braces, the 'if (fndecl' head of
   the first condition and the trailing 'return ret;' are missing from
   this excerpt. */
13504 fold_call_stmt (gimple stmt, bool ignore)
13506 tree ret = NULL_TREE;
13507 tree fndecl = gimple_call_fndecl (stmt);
13508 location_t loc = gimple_location (stmt);
/* Only fold direct calls to builtins, and never calls that expand
   __builtin_va_arg_pack. */
13510 && TREE_CODE (fndecl) == FUNCTION_DECL
13511 && DECL_BUILT_IN (fndecl)
13512 && !gimple_call_va_arg_pack_p (stmt))
13514 int nargs = gimple_call_num_args (stmt);
13516 if (avoid_folding_inline_builtin (fndecl))
13518 /* FIXME: Don't use a list in this interface. */
13519 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
/* Machine-dependent builtins are delegated to the target hook,
   which still takes a TREE_LIST of arguments. */
13521 tree arglist = NULL_TREE;
13523 for (i = nargs - 1; i >= 0; i--)
13524 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13525 return targetm.fold_builtin (fndecl, arglist, ignore);
/* Fixed-arity builtins go through fold_builtin_n; the rest are
   handled by the varargs-specific folder. */
13529 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13531 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13533 for (i = 0; i < nargs; i++)
13534 args[i] = gimple_call_arg (stmt, i);
13535 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13538 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13541 /* Propagate location information from original call to
13542 expansion of builtin. Otherwise things like
13543 maybe_emit_chk_warning, that operate on the expansion
13544 of a builtin, will use the wrong location information. */
13545 if (gimple_has_location (stmt))
13547 tree realret = ret;
/* Look through the no-warning NOP wrapper added by the folder. */
13548 if (TREE_CODE (ret) == NOP_EXPR)
13549 realret = TREE_OPERAND (ret, 0);
13550 if (CAN_HAVE_LOCATION_P (realret)
13551 && !EXPR_HAS_LOCATION (realret))
13552 SET_EXPR_LOCATION (realret, loc);
13562 /* Look up the function in built_in_decls that corresponds to DECL
13563 and set ASMSPEC as its user assembler name. DECL must be a
13564 function decl that declares a builtin. */
13567 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13570 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13571 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13574 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13575 set_user_assembler_name (builtin, asmspec);
13576 switch (DECL_FUNCTION_CODE (decl))
13578 case BUILT_IN_MEMCPY:
13579 init_block_move_fn (asmspec);
13580 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13582 case BUILT_IN_MEMSET:
13583 init_block_clear_fn (asmspec);
13584 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13586 case BUILT_IN_MEMMOVE:
13587 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13589 case BUILT_IN_MEMCMP:
13590 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13592 case BUILT_IN_ABORT:
13593 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);