1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
68 const char * built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_args_info (tree);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
228 /* Return true if NAME starts with __builtin_ or __sync_. */
231 is_builtin_name (const char *name)
233 if (strncmp (name, "__builtin_", 10) == 0)
235 if (strncmp (name, "__sync_", 7) == 0)
241 /* Return true if DECL is a function symbol representing a built-in. */
244 is_builtin_fn (tree decl)
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
255 called_as_built_in (tree node)
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
260 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
261 return is_builtin_name (name);
264 /* Return the alignment in bits of EXP, an object.
265 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
266 guessed alignment e.g. from type alignment. */
269 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
274 if (handled_component_p (exp))
276 HOST_WIDE_INT bitsize, bitpos;
278 enum machine_mode mode;
279 int unsignedp, volatilep;
281 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
282 &mode, &unsignedp, &volatilep, true);
284 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
289 if (TREE_CODE (offset) == PLUS_EXPR)
291 next_offset = TREE_OPERAND (offset, 0);
292 offset = TREE_OPERAND (offset, 1);
296 if (host_integerp (offset, 1))
298 /* Any overflow in calculating offset_bits won't change
301 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
304 inner = MIN (inner, (offset_bits & -offset_bits));
306 else if (TREE_CODE (offset) == MULT_EXPR
307 && host_integerp (TREE_OPERAND (offset, 1), 1))
309 /* Any overflow in calculating offset_factor won't change
311 unsigned offset_factor
312 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
316 inner = MIN (inner, (offset_factor & -offset_factor));
320 inner = MIN (inner, BITS_PER_UNIT);
323 offset = next_offset;
326 if (TREE_CODE (exp) == CONST_DECL)
327 exp = DECL_INITIAL (exp);
329 && TREE_CODE (exp) != LABEL_DECL)
330 align = MIN (inner, DECL_ALIGN (exp));
331 #ifdef CONSTANT_ALIGNMENT
332 else if (CONSTANT_CLASS_P (exp))
333 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
335 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
336 || TREE_CODE (exp) == INDIRECT_REF)
337 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
339 align = MIN (align, inner);
340 return MIN (align, max_align);
343 /* Returns true iff we can trust that alignment information has been
344 calculated properly. */
347 can_trust_pointer_alignment (void)
349 /* We rely on TER to compute accurate alignment information. */
350 return (optimize && flag_tree_ter);
353 /* Return the alignment in bits of EXP, a pointer valued expression.
354 But don't return more than MAX_ALIGN no matter what.
355 The alignment returned is, by default, the alignment of the thing that
356 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
358 Otherwise, look at the expression to see if we can do better, i.e., if the
359 expression is actually pointing at an object whose alignment is tighter. */
362 get_pointer_alignment (tree exp, unsigned int max_align)
364 unsigned int align, inner;
366 if (!can_trust_pointer_alignment ())
369 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
372 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
373 align = MIN (align, max_align);
377 switch (TREE_CODE (exp))
380 exp = TREE_OPERAND (exp, 0);
381 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
384 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
385 align = MIN (inner, max_align);
388 case POINTER_PLUS_EXPR:
389 /* If sum of pointer + int, restrict our maximum alignment to that
390 imposed by the integer. If not, we can't do any better than
392 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
395 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
396 & (max_align / BITS_PER_UNIT - 1))
400 exp = TREE_OPERAND (exp, 0);
404 /* See what we are pointing at and look at its alignment. */
405 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
413 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
414 way, because it could contain a zero byte in the middle.
415 TREE_STRING_LENGTH is the size of the character array, not the string.
417 ONLY_VALUE should be nonzero if the result is not going to be emitted
418 into the instruction stream and zero if it is going to be expanded.
419 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
420 is returned, otherwise NULL, since
421 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
422 evaluate the side-effects.
424 The value returned is of type `ssizetype'.
426 Unfortunately, string_constant can't access the values of const char
427 arrays with initializers, so neither can we do so here. */
430 c_strlen (tree src, int only_value)
433 HOST_WIDE_INT offset;
439 if (TREE_CODE (src) == COND_EXPR
440 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
444 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
445 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
446 if (tree_int_cst_equal (len1, len2))
450 if (TREE_CODE (src) == COMPOUND_EXPR
451 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
452 return c_strlen (TREE_OPERAND (src, 1), only_value);
454 if (EXPR_HAS_LOCATION (src))
455 loc = EXPR_LOCATION (src);
457 loc = input_location;
459 src = string_constant (src, &offset_node);
463 max = TREE_STRING_LENGTH (src) - 1;
464 ptr = TREE_STRING_POINTER (src);
466 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
468 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
469 compute the offset to the following null if we don't know where to
470 start searching for it. */
473 for (i = 0; i < max; i++)
477 /* We don't know the starting offset, but we do know that the string
478 has no internal zero bytes. We can assume that the offset falls
479 within the bounds of the string; otherwise, the programmer deserves
480 what he gets. Subtract the offset from the length of the string,
481 and return that. This would perhaps not be valid if we were dealing
482 with named arrays in addition to literal string constants. */
484 return size_diffop_loc (loc, size_int (max), offset_node);
487 /* We have a known offset into the string. Start searching there for
488 a null character if we can represent it as a single HOST_WIDE_INT. */
489 if (offset_node == 0)
491 else if (! host_integerp (offset_node, 0))
494 offset = tree_low_cst (offset_node, 0);
496 /* If the offset is known to be out of bounds, warn, and call strlen at
498 if (offset < 0 || offset > max)
500 /* Suppress multiple warnings for propagated constant strings. */
501 if (! TREE_NO_WARNING (src))
503 warning_at (loc, 0, "offset outside bounds of constant string");
504 TREE_NO_WARNING (src) = 1;
509 /* Use strlen to search for the first zero byte. Since any strings
510 constructed with build_string will have nulls appended, we win even
511 if we get handed something like (char[4])"abcd".
513 Since OFFSET is our starting index into the string, no further
514 calculation is needed. */
515 return ssize_int (strlen (ptr + offset));
518 /* Return a char pointer for a C string if it is a string constant
519 or sum of string constant and integer constant. */
526 src = string_constant (src, &offset_node);
530 if (offset_node == 0)
531 return TREE_STRING_POINTER (src);
532 else if (!host_integerp (offset_node, 1)
533 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
536 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
539 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
540 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
543 c_readstr (const char *str, enum machine_mode mode)
549 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
554 for (i = 0; i < GET_MODE_SIZE (mode); i++)
557 if (WORDS_BIG_ENDIAN)
558 j = GET_MODE_SIZE (mode) - i - 1;
559 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
560 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
561 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
563 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
566 ch = (unsigned char) str[i];
567 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
569 return immed_double_const (c[0], c[1], mode);
572 /* Cast a target constant CST to target CHAR and if that value fits into
573 host char type, return zero and put that value into variable pointed to by
577 target_char_cast (tree cst, char *p)
579 unsigned HOST_WIDE_INT val, hostval;
581 if (!host_integerp (cst, 1)
582 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
585 val = tree_low_cst (cst, 1);
586 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
587 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
590 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
591 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
600 /* Similar to save_expr, but assumes that arbitrary code is not executed
601 in between the multiple evaluations. In particular, we assume that a
602 non-addressable local variable will not be modified. */
605 builtin_save_expr (tree exp)
607 if (TREE_ADDRESSABLE (exp) == 0
608 && (TREE_CODE (exp) == PARM_DECL
609 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
612 return save_expr (exp);
615 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
616 times to get the address of either a higher stack frame, or a return
617 address located within it (depending on FNDECL_CODE). */
620 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
624 #ifdef INITIAL_FRAME_ADDRESS_RTX
625 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
629 /* For a zero count with __builtin_return_address, we don't care what
630 frame address we return, because target-specific definitions will
631 override us. Therefore frame pointer elimination is OK, and using
632 the soft frame pointer is OK.
634 For a nonzero count, or a zero count with __builtin_frame_address,
635 we require a stable offset from the current frame pointer to the
636 previous one, so we must use the hard frame pointer, and
637 we must disable frame pointer elimination. */
638 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
639 tem = frame_pointer_rtx;
642 tem = hard_frame_pointer_rtx;
644 /* Tell reload not to eliminate the frame pointer. */
645 crtl->accesses_prior_frames = 1;
649 /* Some machines need special handling before we can access
650 arbitrary frames. For example, on the SPARC, we must first flush
651 all register windows to the stack. */
652 #ifdef SETUP_FRAME_ADDRESSES
654 SETUP_FRAME_ADDRESSES ();
657 /* On the SPARC, the return address is not in the frame, it is in a
658 register. There is no way to access it off of the current frame
659 pointer, but it can be accessed off the previous frame pointer by
660 reading the value from the register window save area. */
661 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
662 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
666 /* Scan back COUNT frames to the specified frame. */
667 for (i = 0; i < count; i++)
669 /* Assume the dynamic chain pointer is in the word that the
670 frame address points to, unless otherwise specified. */
671 #ifdef DYNAMIC_CHAIN_ADDRESS
672 tem = DYNAMIC_CHAIN_ADDRESS (tem);
674 tem = memory_address (Pmode, tem);
675 tem = gen_frame_mem (Pmode, tem);
676 tem = copy_to_reg (tem);
679 /* For __builtin_frame_address, return what we've got. But, on
680 the SPARC for example, we may have to add a bias. */
681 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
682 #ifdef FRAME_ADDR_RTX
683 return FRAME_ADDR_RTX (tem);
688 /* For __builtin_return_address, get the return address from that frame. */
689 #ifdef RETURN_ADDR_RTX
690 tem = RETURN_ADDR_RTX (count, tem);
692 tem = memory_address (Pmode,
693 plus_constant (tem, GET_MODE_SIZE (Pmode)));
694 tem = gen_frame_mem (Pmode, tem);
699 /* Alias set used for setjmp buffer. */
700 static alias_set_type setjmp_alias_set = -1;
702 /* Construct the leading half of a __builtin_setjmp call. Control will
703 return to RECEIVER_LABEL. This is also called directly by the SJLJ
704 exception handling code. */
707 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
709 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
713 if (setjmp_alias_set == -1)
714 setjmp_alias_set = new_alias_set ();
716 buf_addr = convert_memory_address (Pmode, buf_addr);
718 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
720 /* We store the frame pointer and the address of receiver_label in
721 the buffer and use the rest of it for the stack save area, which
722 is machine-dependent. */
724 mem = gen_rtx_MEM (Pmode, buf_addr);
725 set_mem_alias_set (mem, setjmp_alias_set);
726 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
728 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
729 set_mem_alias_set (mem, setjmp_alias_set);
731 emit_move_insn (validize_mem (mem),
732 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
734 stack_save = gen_rtx_MEM (sa_mode,
735 plus_constant (buf_addr,
736 2 * GET_MODE_SIZE (Pmode)));
737 set_mem_alias_set (stack_save, setjmp_alias_set);
738 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
740 /* If there is further processing to do, do it. */
741 #ifdef HAVE_builtin_setjmp_setup
742 if (HAVE_builtin_setjmp_setup)
743 emit_insn (gen_builtin_setjmp_setup (buf_addr));
746 /* Tell optimize_save_area_alloca that extra work is going to
747 need to go on during alloca. */
748 cfun->calls_setjmp = 1;
750 /* We have a nonlocal label. */
751 cfun->has_nonlocal_label = 1;
754 /* Construct the trailing part of a __builtin_setjmp call. This is
755 also called directly by the SJLJ exception handling code. */
758 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
762 /* Clobber the FP when we get here, so we have to make sure it's
763 marked as used by this function. */
764 emit_use (hard_frame_pointer_rtx);
766 /* Mark the static chain as clobbered here so life information
767 doesn't get messed up for it. */
768 chain = targetm.calls.static_chain (current_function_decl, true);
769 if (chain && REG_P (chain))
770 emit_clobber (chain);
772 /* Now put in the code to restore the frame pointer, and argument
773 pointer, if needed. */
774 #ifdef HAVE_nonlocal_goto
775 if (! HAVE_nonlocal_goto)
778 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
779 /* This might change the hard frame pointer in ways that aren't
780 apparent to early optimization passes, so force a clobber. */
781 emit_clobber (hard_frame_pointer_rtx);
784 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
785 if (fixed_regs[ARG_POINTER_REGNUM])
787 #ifdef ELIMINABLE_REGS
789 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
791 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
792 if (elim_regs[i].from == ARG_POINTER_REGNUM
793 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
796 if (i == ARRAY_SIZE (elim_regs))
799 /* Now restore our arg pointer from the address at which it
800 was saved in our stack frame. */
801 emit_move_insn (crtl->args.internal_arg_pointer,
802 copy_to_reg (get_arg_pointer_save_area ()));
807 #ifdef HAVE_builtin_setjmp_receiver
808 if (HAVE_builtin_setjmp_receiver)
809 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
812 #ifdef HAVE_nonlocal_goto_receiver
813 if (HAVE_nonlocal_goto_receiver)
814 emit_insn (gen_nonlocal_goto_receiver ());
819 /* We must not allow the code we just generated to be reordered by
820 scheduling. Specifically, the update of the frame pointer must
821 happen immediately, not later. */
822 emit_insn (gen_blockage ());
825 /* __builtin_longjmp is passed a pointer to an array of five words (not
826 all will be used on all machines). It operates similarly to the C
827 library function of the same name, but is more efficient. Much of
828 the code below is copied from the handling of non-local gotos. */
831 expand_builtin_longjmp (rtx buf_addr, rtx value)
833 rtx fp, lab, stack, insn, last;
834 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
836 /* DRAP is needed for stack realign if longjmp is expanded to current
838 if (SUPPORTS_STACK_ALIGNMENT)
839 crtl->need_drap = true;
841 if (setjmp_alias_set == -1)
842 setjmp_alias_set = new_alias_set ();
844 buf_addr = convert_memory_address (Pmode, buf_addr);
846 buf_addr = force_reg (Pmode, buf_addr);
848 /* We require that the user must pass a second argument of 1, because
849 that is what builtin_setjmp will return. */
850 gcc_assert (value == const1_rtx);
852 last = get_last_insn ();
853 #ifdef HAVE_builtin_longjmp
854 if (HAVE_builtin_longjmp)
855 emit_insn (gen_builtin_longjmp (buf_addr));
859 fp = gen_rtx_MEM (Pmode, buf_addr);
860 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
861 GET_MODE_SIZE (Pmode)));
863 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
864 2 * GET_MODE_SIZE (Pmode)));
865 set_mem_alias_set (fp, setjmp_alias_set);
866 set_mem_alias_set (lab, setjmp_alias_set);
867 set_mem_alias_set (stack, setjmp_alias_set);
869 /* Pick up FP, label, and SP from the block and jump. This code is
870 from expand_goto in stmt.c; see there for detailed comments. */
871 #ifdef HAVE_nonlocal_goto
872 if (HAVE_nonlocal_goto)
873 /* We have to pass a value to the nonlocal_goto pattern that will
874 get copied into the static_chain pointer, but it does not matter
875 what that value is, because builtin_setjmp does not use it. */
876 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
880 lab = copy_to_reg (lab);
882 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
883 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
885 emit_move_insn (hard_frame_pointer_rtx, fp);
886 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
888 emit_use (hard_frame_pointer_rtx);
889 emit_use (stack_pointer_rtx);
890 emit_indirect_jump (lab);
894 /* Search backwards and mark the jump insn as a non-local goto.
895 Note that this precludes the use of __builtin_longjmp to a
896 __builtin_setjmp target in the same function. However, we've
897 already cautioned the user that these functions are for
898 internal exception handling use only. */
899 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
901 gcc_assert (insn != last);
905 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
908 else if (CALL_P (insn))
913 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
914 and the address of the save area. */
917 expand_builtin_nonlocal_goto (tree exp)
919 tree t_label, t_save_area;
920 rtx r_label, r_save_area, r_fp, r_sp, insn;
922 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
925 t_label = CALL_EXPR_ARG (exp, 0);
926 t_save_area = CALL_EXPR_ARG (exp, 1);
928 r_label = expand_normal (t_label);
929 r_label = convert_memory_address (Pmode, r_label);
930 r_save_area = expand_normal (t_save_area);
931 r_save_area = convert_memory_address (Pmode, r_save_area);
932 /* Copy the address of the save location to a register just in case it was based
933 on the frame pointer. */
934 r_save_area = copy_to_reg (r_save_area);
935 r_fp = gen_rtx_MEM (Pmode, r_save_area);
936 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
937 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
939 crtl->has_nonlocal_goto = 1;
941 #ifdef HAVE_nonlocal_goto
942 /* ??? We no longer need to pass the static chain value, afaik. */
943 if (HAVE_nonlocal_goto)
944 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
948 r_label = copy_to_reg (r_label);
950 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
951 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
953 /* Restore frame pointer for containing function.
954 This sets the actual hard register used for the frame pointer
955 to the location of the function's incoming static chain info.
956 The non-local goto handler will then adjust it to contain the
957 proper value and reload the argument pointer, if needed. */
958 emit_move_insn (hard_frame_pointer_rtx, r_fp);
959 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
961 /* USE of hard_frame_pointer_rtx added for consistency;
962 not clear if really needed. */
963 emit_use (hard_frame_pointer_rtx);
964 emit_use (stack_pointer_rtx);
966 /* If the architecture is using a GP register, we must
967 conservatively assume that the target function makes use of it.
968 The prologue of functions with nonlocal gotos must therefore
969 initialize the GP register to the appropriate value, and we
970 must then make sure that this value is live at the point
971 of the jump. (Note that this doesn't necessarily apply
972 to targets with a nonlocal_goto pattern; they are free
973 to implement it in their own way. Note also that this is
974 a no-op if the GP register is a global invariant.) */
975 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
976 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
977 emit_use (pic_offset_table_rtx);
979 emit_indirect_jump (r_label);
982 /* Search backwards to the jump insn and mark it as a
984 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
988 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
991 else if (CALL_P (insn))
998 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
999 (not all will be used on all machines) that was passed to __builtin_setjmp.
1000 It updates the stack pointer in that block to correspond to the current
1004 expand_builtin_update_setjmp_buf (rtx buf_addr)
1006 enum machine_mode sa_mode = Pmode;
1010 #ifdef HAVE_save_stack_nonlocal
1011 if (HAVE_save_stack_nonlocal)
1012 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1014 #ifdef STACK_SAVEAREA_MODE
1015 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1019 = gen_rtx_MEM (sa_mode,
1022 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1026 emit_insn (gen_setjmp ());
1029 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1032 /* Expand a call to __builtin_prefetch. For a target that does not support
1033 data prefetch, evaluate the memory address argument in case it has side
1037 expand_builtin_prefetch (tree exp)
1039 tree arg0, arg1, arg2;
1043 if (!validate_arglist (exp, POINTER_TYPE, 0))
1046 arg0 = CALL_EXPR_ARG (exp, 0);
1048 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1049 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1051 nargs = call_expr_nargs (exp);
1053 arg1 = CALL_EXPR_ARG (exp, 1);
1055 arg1 = integer_zero_node;
1057 arg2 = CALL_EXPR_ARG (exp, 2);
1059 arg2 = build_int_cst (NULL_TREE, 3);
1061 /* Argument 0 is an address. */
1062 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1064 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1065 if (TREE_CODE (arg1) != INTEGER_CST)
1067 error ("second argument to %<__builtin_prefetch%> must be a constant");
1068 arg1 = integer_zero_node;
1070 op1 = expand_normal (arg1);
1071 /* Argument 1 must be either zero or one. */
1072 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1074 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1079 /* Argument 2 (locality) must be a compile-time constant int. */
1080 if (TREE_CODE (arg2) != INTEGER_CST)
1082 error ("third argument to %<__builtin_prefetch%> must be a constant");
1083 arg2 = integer_zero_node;
1085 op2 = expand_normal (arg2);
1086 /* Argument 2 must be 0, 1, 2, or 3. */
1087 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1089 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1093 #ifdef HAVE_prefetch
1096 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1098 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1099 || (GET_MODE (op0) != Pmode))
1101 op0 = convert_memory_address (Pmode, op0);
1102 op0 = force_reg (Pmode, op0);
1104 emit_insn (gen_prefetch (op0, op1, op2));
1108 /* Don't do anything with direct references to volatile memory, but
1109 generate code to handle other side effects. */
1110 if (!MEM_P (op0) && side_effects_p (op0))
1114 /* Get a MEM rtx for expression EXP which is the address of an operand
1115 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1116 the maximum length of the block of memory that might be accessed or
1120 get_memory_rtx (tree exp, tree len)
1122 tree orig_exp = exp;
1126 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1127 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1128 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1129 exp = TREE_OPERAND (exp, 0);
1131 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1132 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1134 /* Get an expression we can use to find the attributes to assign to MEM.
1135 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1136 we can. First remove any nops. */
1137 while (CONVERT_EXPR_P (exp)
1138 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1139 exp = TREE_OPERAND (exp, 0);
1142 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1143 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1144 && host_integerp (TREE_OPERAND (exp, 1), 0)
1145 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1146 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1147 else if (TREE_CODE (exp) == ADDR_EXPR)
1148 exp = TREE_OPERAND (exp, 0);
1149 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1150 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1154 /* Honor attributes derived from exp, except for the alias set
1155 (as builtin stringops may alias with anything) and the size
1156 (as stringops may access multiple array elements). */
1159 set_mem_attributes (mem, exp, 0);
1162 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1164 /* Allow the string and memory builtins to overflow from one
1165 field into another, see http://gcc.gnu.org/PR23561.
1166 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1167 memory accessed by the string or memory builtin will fit
1168 within the field. */
1169 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1171 tree mem_expr = MEM_EXPR (mem);
1172 HOST_WIDE_INT offset = -1, length = -1;
1175 while (TREE_CODE (inner) == ARRAY_REF
1176 || CONVERT_EXPR_P (inner)
1177 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1178 || TREE_CODE (inner) == SAVE_EXPR)
1179 inner = TREE_OPERAND (inner, 0);
1181 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1183 if (MEM_OFFSET (mem)
1184 && CONST_INT_P (MEM_OFFSET (mem)))
1185 offset = INTVAL (MEM_OFFSET (mem));
1187 if (offset >= 0 && len && host_integerp (len, 0))
1188 length = tree_low_cst (len, 0);
1190 while (TREE_CODE (inner) == COMPONENT_REF)
1192 tree field = TREE_OPERAND (inner, 1);
1193 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1194 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1196 /* Bitfields are generally not byte-addressable. */
1197 gcc_assert (!DECL_BIT_FIELD (field)
1198 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1199 % BITS_PER_UNIT) == 0
1200 && host_integerp (DECL_SIZE (field), 0)
1201 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1202 % BITS_PER_UNIT) == 0));
1204 /* If we can prove that the memory starting at XEXP (mem, 0) and
1205 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1206 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1207 fields without DECL_SIZE_UNIT like flexible array members. */
1209 && DECL_SIZE_UNIT (field)
1210 && host_integerp (DECL_SIZE_UNIT (field), 0))
1213 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1216 && offset + length <= size)
1221 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1222 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1223 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1231 mem_expr = TREE_OPERAND (mem_expr, 0);
1232 inner = TREE_OPERAND (inner, 0);
1235 if (mem_expr == NULL)
1237 if (mem_expr != MEM_EXPR (mem))
1239 set_mem_expr (mem, mem_expr);
1240 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1243 set_mem_alias_set (mem, 0);
1244 set_mem_size (mem, NULL_RTX);
1250 /* Built-in functions to perform an untyped call and return. */
1252 /* For each register that may be used for calling a function, this
1253 gives a mode used to copy the register's value. VOIDmode indicates
1254 the register is not used for calling a function. If the machine
1255 has register windows, this gives only the outbound registers.
1256 INCOMING_REGNO gives the corresponding inbound register. */
1257 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1259 /* For each register that may be used for returning values, this gives
1260 a mode used to copy the register's value. VOIDmode indicates the
1261 register is not used for returning values. If the machine has
1262 register windows, this gives only the outbound registers.
1263 INCOMING_REGNO gives the corresponding inbound register. */
1264 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1266 /* Return the size required for the block returned by __builtin_apply_args,
1267 and initialize apply_args_mode. */
1270 apply_args_size (void)
1272 static int size = -1;
1275 enum machine_mode mode;
1277 /* The values computed by this function never change. */
1280 /* The first value is the incoming arg-pointer. */
1281 size = GET_MODE_SIZE (Pmode);
1283 /* The second value is the structure value address unless this is
1284 passed as an "invisible" first argument. */
1285 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1286 size += GET_MODE_SIZE (Pmode);
1288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1289 if (FUNCTION_ARG_REGNO_P (regno))
1291 mode = reg_raw_mode[regno];
1293 gcc_assert (mode != VOIDmode);
1295 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1296 if (size % align != 0)
1297 size = CEIL (size, align) * align;
1298 size += GET_MODE_SIZE (mode);
1299 apply_args_mode[regno] = mode;
1303 apply_args_mode[regno] = VOIDmode;
1309 /* Return the size required for the block returned by __builtin_apply,
1310 and initialize apply_result_mode. */
1313 apply_result_size (void)
1315 static int size = -1;
1317 enum machine_mode mode;
1319 /* The values computed by this function never change. */
1324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325 if (FUNCTION_VALUE_REGNO_P (regno))
1327 mode = reg_raw_mode[regno];
1329 gcc_assert (mode != VOIDmode);
1331 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1332 if (size % align != 0)
1333 size = CEIL (size, align) * align;
1334 size += GET_MODE_SIZE (mode);
1335 apply_result_mode[regno] = mode;
1338 apply_result_mode[regno] = VOIDmode;
1340 /* Allow targets that use untyped_call and untyped_return to override
1341 the size so that machine-specific information can be stored here. */
1342 #ifdef APPLY_RESULT_SIZE
1343 size = APPLY_RESULT_SIZE;
1349 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1350 /* Create a vector describing the result block RESULT. If SAVEP is true,
1351 the result block is used to save the values; otherwise it is used to
1352 restore the values. */
1355 result_vector (int savep, rtx result)
1357 int regno, size, align, nelts;
1358 enum machine_mode mode;
1360 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1363 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1364 if ((mode = apply_result_mode[regno]) != VOIDmode)
1366 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1367 if (size % align != 0)
1368 size = CEIL (size, align) * align;
1369 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1370 mem = adjust_address (result, mode, size);
1371 savevec[nelts++] = (savep
1372 ? gen_rtx_SET (VOIDmode, mem, reg)
1373 : gen_rtx_SET (VOIDmode, reg, mem));
1374 size += GET_MODE_SIZE (mode);
1376 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1378 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1380 /* Save the state required to perform an untyped call with the same
1381 arguments as were passed to the current function. */
1384 expand_builtin_apply_args_1 (void)
1387 int size, align, regno;
1388 enum machine_mode mode;
1389 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1391 /* Create a block where the arg-pointer, structure value address,
1392 and argument registers can be saved. */
1393 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1395 /* Walk past the arg-pointer and structure value address. */
1396 size = GET_MODE_SIZE (Pmode);
1397 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1398 size += GET_MODE_SIZE (Pmode);
1400 /* Save each register used in calling a function to the block. */
1401 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1402 if ((mode = apply_args_mode[regno]) != VOIDmode)
1404 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1405 if (size % align != 0)
1406 size = CEIL (size, align) * align;
1408 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1410 emit_move_insn (adjust_address (registers, mode, size), tem);
1411 size += GET_MODE_SIZE (mode);
1414 /* Save the arg pointer to the block. */
1415 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1416 #ifdef STACK_GROWS_DOWNWARD
1417 /* We need the pointer as the caller actually passed them to us, not
1418 as we might have pretended they were passed. Make sure it's a valid
1419 operand, as emit_move_insn isn't expected to handle a PLUS. */
1421 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1424 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1426 size = GET_MODE_SIZE (Pmode);
1428 /* Save the structure value address unless this is passed as an
1429 "invisible" first argument. */
1430 if (struct_incoming_value)
1432 emit_move_insn (adjust_address (registers, Pmode, size),
1433 copy_to_reg (struct_incoming_value));
1434 size += GET_MODE_SIZE (Pmode);
1437 /* Return the address of the block. */
1438 return copy_addr_to_reg (XEXP (registers, 0));
1441 /* __builtin_apply_args returns block of memory allocated on
1442 the stack into which is stored the arg pointer, structure
1443 value address, static chain, and all the registers that might
1444 possibly be used in performing a function call. The code is
1445 moved to the start of the function so the incoming values are
1449 expand_builtin_apply_args (void)
1451 /* Don't do __builtin_apply_args more than once in a function.
1452 Save the result of the first call and reuse it. */
1453 if (apply_args_value != 0)
1454 return apply_args_value;
1456 /* When this function is called, it means that registers must be
1457 saved on entry to this function. So we migrate the
1458 call to the first insn of this function. */
1463 temp = expand_builtin_apply_args_1 ();
1467 apply_args_value = temp;
1469 /* Put the insns after the NOTE that starts the function.
1470 If this is inside a start_sequence, make the outer-level insn
1471 chain current, so the code is placed at the start of the
1472 function. If internal_arg_pointer is a non-virtual pseudo,
1473 it needs to be placed after the function that initializes
1475 push_topmost_sequence ();
1476 if (REG_P (crtl->args.internal_arg_pointer)
1477 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1478 emit_insn_before (seq, parm_birth_insn);
1480 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1481 pop_topmost_sequence ();
1486 /* Perform an untyped call and save the state required to perform an
1487 untyped return of whatever value was returned by the given function. */
1490 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1492 int size, align, regno;
1493 enum machine_mode mode;
1494 rtx incoming_args, result, reg, dest, src, call_insn;
1495 rtx old_stack_level = 0;
1496 rtx call_fusage = 0;
1497 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1499 arguments = convert_memory_address (Pmode, arguments);
1501 /* Create a block where the return registers can be saved. */
1502 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1504 /* Fetch the arg pointer from the ARGUMENTS block. */
1505 incoming_args = gen_reg_rtx (Pmode);
1506 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1507 #ifndef STACK_GROWS_DOWNWARD
1508 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1509 incoming_args, 0, OPTAB_LIB_WIDEN);
1512 /* Push a new argument block and copy the arguments. Do not allow
1513 the (potential) memcpy call below to interfere with our stack
1515 do_pending_stack_adjust ();
1518 /* Save the stack with nonlocal if available. */
1519 #ifdef HAVE_save_stack_nonlocal
1520 if (HAVE_save_stack_nonlocal)
1521 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1524 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1526 /* Allocate a block of memory onto the stack and copy the memory
1527 arguments to the outgoing arguments address. */
1528 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1530 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1531 may have already set current_function_calls_alloca to true.
1532 current_function_calls_alloca won't be set if argsize is zero,
1533 so we have to guarantee need_drap is true here. */
1534 if (SUPPORTS_STACK_ALIGNMENT)
1535 crtl->need_drap = true;
1537 dest = virtual_outgoing_args_rtx;
1538 #ifndef STACK_GROWS_DOWNWARD
1539 if (CONST_INT_P (argsize))
1540 dest = plus_constant (dest, -INTVAL (argsize));
1542 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1544 dest = gen_rtx_MEM (BLKmode, dest);
1545 set_mem_align (dest, PARM_BOUNDARY);
1546 src = gen_rtx_MEM (BLKmode, incoming_args);
1547 set_mem_align (src, PARM_BOUNDARY);
1548 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1550 /* Refer to the argument block. */
1552 arguments = gen_rtx_MEM (BLKmode, arguments);
1553 set_mem_align (arguments, PARM_BOUNDARY);
1555 /* Walk past the arg-pointer and structure value address. */
1556 size = GET_MODE_SIZE (Pmode);
1558 size += GET_MODE_SIZE (Pmode);
1560 /* Restore each of the registers previously saved. Make USE insns
1561 for each of these registers for use in making the call. */
1562 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1563 if ((mode = apply_args_mode[regno]) != VOIDmode)
1565 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1566 if (size % align != 0)
1567 size = CEIL (size, align) * align;
1568 reg = gen_rtx_REG (mode, regno);
1569 emit_move_insn (reg, adjust_address (arguments, mode, size));
1570 use_reg (&call_fusage, reg);
1571 size += GET_MODE_SIZE (mode);
1574 /* Restore the structure value address unless this is passed as an
1575 "invisible" first argument. */
1576 size = GET_MODE_SIZE (Pmode);
1579 rtx value = gen_reg_rtx (Pmode);
1580 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1581 emit_move_insn (struct_value, value);
1582 if (REG_P (struct_value))
1583 use_reg (&call_fusage, struct_value);
1584 size += GET_MODE_SIZE (Pmode);
1587 /* All arguments and registers used for the call are set up by now! */
1588 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1590 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1591 and we don't want to load it into a register as an optimization,
1592 because prepare_call_address already did it if it should be done. */
1593 if (GET_CODE (function) != SYMBOL_REF)
1594 function = memory_address (FUNCTION_MODE, function);
1596 /* Generate the actual call instruction and save the return value. */
1597 #ifdef HAVE_untyped_call
1598 if (HAVE_untyped_call)
1599 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1600 result, result_vector (1, result)));
1603 #ifdef HAVE_call_value
1604 if (HAVE_call_value)
1608 /* Locate the unique return register. It is not possible to
1609 express a call that sets more than one return register using
1610 call_value; use untyped_call for that. In fact, untyped_call
1611 only needs to save the return registers in the given block. */
1612 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1613 if ((mode = apply_result_mode[regno]) != VOIDmode)
1615 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1617 valreg = gen_rtx_REG (mode, regno);
1620 emit_call_insn (GEN_CALL_VALUE (valreg,
1621 gen_rtx_MEM (FUNCTION_MODE, function),
1622 const0_rtx, NULL_RTX, const0_rtx));
1624 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1630 /* Find the CALL insn we just emitted, and attach the register usage
1632 call_insn = last_call_insn ();
1633 add_function_usage_to (call_insn, call_fusage);
1635 /* Restore the stack. */
1636 #ifdef HAVE_save_stack_nonlocal
1637 if (HAVE_save_stack_nonlocal)
1638 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1641 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1645 /* Return the address of the result block. */
1646 result = copy_addr_to_reg (XEXP (result, 0));
1647 return convert_memory_address (ptr_mode, result);
1650 /* Perform an untyped return. */
1653 expand_builtin_return (rtx result)
1655 int size, align, regno;
1656 enum machine_mode mode;
1658 rtx call_fusage = 0;
1660 result = convert_memory_address (Pmode, result);
1662 apply_result_size ();
1663 result = gen_rtx_MEM (BLKmode, result);
1665 #ifdef HAVE_untyped_return
1666 if (HAVE_untyped_return)
1668 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1674 /* Restore the return value and note that each value is used. */
1676 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1677 if ((mode = apply_result_mode[regno]) != VOIDmode)
1679 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1680 if (size % align != 0)
1681 size = CEIL (size, align) * align;
1682 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1683 emit_move_insn (reg, adjust_address (result, mode, size));
1685 push_to_sequence (call_fusage);
1687 call_fusage = get_insns ();
1689 size += GET_MODE_SIZE (mode);
1692 /* Put the USE insns before the return. */
1693 emit_insn (call_fusage);
1695 /* Return whatever values was restored by jumping directly to the end
1697 expand_naked_return ();
1700 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1702 static enum type_class
1703 type_to_class (tree type)
1705 switch (TREE_CODE (type))
1707 case VOID_TYPE: return void_type_class;
1708 case INTEGER_TYPE: return integer_type_class;
1709 case ENUMERAL_TYPE: return enumeral_type_class;
1710 case BOOLEAN_TYPE: return boolean_type_class;
1711 case POINTER_TYPE: return pointer_type_class;
1712 case REFERENCE_TYPE: return reference_type_class;
1713 case OFFSET_TYPE: return offset_type_class;
1714 case REAL_TYPE: return real_type_class;
1715 case COMPLEX_TYPE: return complex_type_class;
1716 case FUNCTION_TYPE: return function_type_class;
1717 case METHOD_TYPE: return method_type_class;
1718 case RECORD_TYPE: return record_type_class;
1720 case QUAL_UNION_TYPE: return union_type_class;
1721 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1722 ? string_type_class : array_type_class);
1723 case LANG_TYPE: return lang_type_class;
1724 default: return no_type_class;
1728 /* Expand a call EXP to __builtin_classify_type. */
1731 expand_builtin_classify_type (tree exp)
1733 if (call_expr_nargs (exp))
1734 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1735 return GEN_INT (no_type_class);
1738 /* This helper macro, meant to be used in mathfn_built_in below,
1739 determines which among a set of three builtin math functions is
1740 appropriate for a given type mode. The `F' and `L' cases are
1741 automatically generated from the `double' case. */
1742 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1743 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1744 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1745 fcodel = BUILT_IN_MATHFN##L ; break;
1746 /* Similar to above, but appends _R after any F/L suffix. */
1747 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1748 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1749 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1750 fcodel = BUILT_IN_MATHFN##L_R ; break;
1752 /* Return mathematic function equivalent to FN but operating directly
1753 on TYPE, if available. If IMPLICIT is true find the function in
1754 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1755 can't do the conversion, return zero. */
1758 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1760 tree const *const fn_arr
1761 = implicit ? implicit_built_in_decls : built_in_decls;
1762 enum built_in_function fcode, fcodef, fcodel;
1766 CASE_MATHFN (BUILT_IN_ACOS)
1767 CASE_MATHFN (BUILT_IN_ACOSH)
1768 CASE_MATHFN (BUILT_IN_ASIN)
1769 CASE_MATHFN (BUILT_IN_ASINH)
1770 CASE_MATHFN (BUILT_IN_ATAN)
1771 CASE_MATHFN (BUILT_IN_ATAN2)
1772 CASE_MATHFN (BUILT_IN_ATANH)
1773 CASE_MATHFN (BUILT_IN_CBRT)
1774 CASE_MATHFN (BUILT_IN_CEIL)
1775 CASE_MATHFN (BUILT_IN_CEXPI)
1776 CASE_MATHFN (BUILT_IN_COPYSIGN)
1777 CASE_MATHFN (BUILT_IN_COS)
1778 CASE_MATHFN (BUILT_IN_COSH)
1779 CASE_MATHFN (BUILT_IN_DREM)
1780 CASE_MATHFN (BUILT_IN_ERF)
1781 CASE_MATHFN (BUILT_IN_ERFC)
1782 CASE_MATHFN (BUILT_IN_EXP)
1783 CASE_MATHFN (BUILT_IN_EXP10)
1784 CASE_MATHFN (BUILT_IN_EXP2)
1785 CASE_MATHFN (BUILT_IN_EXPM1)
1786 CASE_MATHFN (BUILT_IN_FABS)
1787 CASE_MATHFN (BUILT_IN_FDIM)
1788 CASE_MATHFN (BUILT_IN_FLOOR)
1789 CASE_MATHFN (BUILT_IN_FMA)
1790 CASE_MATHFN (BUILT_IN_FMAX)
1791 CASE_MATHFN (BUILT_IN_FMIN)
1792 CASE_MATHFN (BUILT_IN_FMOD)
1793 CASE_MATHFN (BUILT_IN_FREXP)
1794 CASE_MATHFN (BUILT_IN_GAMMA)
1795 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1796 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1797 CASE_MATHFN (BUILT_IN_HYPOT)
1798 CASE_MATHFN (BUILT_IN_ILOGB)
1799 CASE_MATHFN (BUILT_IN_INF)
1800 CASE_MATHFN (BUILT_IN_ISINF)
1801 CASE_MATHFN (BUILT_IN_J0)
1802 CASE_MATHFN (BUILT_IN_J1)
1803 CASE_MATHFN (BUILT_IN_JN)
1804 CASE_MATHFN (BUILT_IN_LCEIL)
1805 CASE_MATHFN (BUILT_IN_LDEXP)
1806 CASE_MATHFN (BUILT_IN_LFLOOR)
1807 CASE_MATHFN (BUILT_IN_LGAMMA)
1808 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1809 CASE_MATHFN (BUILT_IN_LLCEIL)
1810 CASE_MATHFN (BUILT_IN_LLFLOOR)
1811 CASE_MATHFN (BUILT_IN_LLRINT)
1812 CASE_MATHFN (BUILT_IN_LLROUND)
1813 CASE_MATHFN (BUILT_IN_LOG)
1814 CASE_MATHFN (BUILT_IN_LOG10)
1815 CASE_MATHFN (BUILT_IN_LOG1P)
1816 CASE_MATHFN (BUILT_IN_LOG2)
1817 CASE_MATHFN (BUILT_IN_LOGB)
1818 CASE_MATHFN (BUILT_IN_LRINT)
1819 CASE_MATHFN (BUILT_IN_LROUND)
1820 CASE_MATHFN (BUILT_IN_MODF)
1821 CASE_MATHFN (BUILT_IN_NAN)
1822 CASE_MATHFN (BUILT_IN_NANS)
1823 CASE_MATHFN (BUILT_IN_NEARBYINT)
1824 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1825 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1826 CASE_MATHFN (BUILT_IN_POW)
1827 CASE_MATHFN (BUILT_IN_POWI)
1828 CASE_MATHFN (BUILT_IN_POW10)
1829 CASE_MATHFN (BUILT_IN_REMAINDER)
1830 CASE_MATHFN (BUILT_IN_REMQUO)
1831 CASE_MATHFN (BUILT_IN_RINT)
1832 CASE_MATHFN (BUILT_IN_ROUND)
1833 CASE_MATHFN (BUILT_IN_SCALB)
1834 CASE_MATHFN (BUILT_IN_SCALBLN)
1835 CASE_MATHFN (BUILT_IN_SCALBN)
1836 CASE_MATHFN (BUILT_IN_SIGNBIT)
1837 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1838 CASE_MATHFN (BUILT_IN_SIN)
1839 CASE_MATHFN (BUILT_IN_SINCOS)
1840 CASE_MATHFN (BUILT_IN_SINH)
1841 CASE_MATHFN (BUILT_IN_SQRT)
1842 CASE_MATHFN (BUILT_IN_TAN)
1843 CASE_MATHFN (BUILT_IN_TANH)
1844 CASE_MATHFN (BUILT_IN_TGAMMA)
1845 CASE_MATHFN (BUILT_IN_TRUNC)
1846 CASE_MATHFN (BUILT_IN_Y0)
1847 CASE_MATHFN (BUILT_IN_Y1)
1848 CASE_MATHFN (BUILT_IN_YN)
1854 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1855 return fn_arr[fcode];
1856 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1857 return fn_arr[fcodef];
1858 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1859 return fn_arr[fcodel];
1864 /* Like mathfn_built_in_1(), but always use the implicit array. */
1867 mathfn_built_in (tree type, enum built_in_function fn)
1869 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1872 /* If errno must be maintained, expand the RTL to check if the result,
1873 TARGET, of a built-in function call, EXP, is NaN, and if so set
1877 expand_errno_check (tree exp, rtx target)
1879 rtx lab = gen_label_rtx ();
1881 /* Test the result; if it is NaN, set errno=EDOM because
1882 the argument was not in the domain. */
1883 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1884 NULL_RTX, NULL_RTX, lab,
1885 /* The jump is very likely. */
1886 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1889 /* If this built-in doesn't throw an exception, set errno directly. */
1890 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1892 #ifdef GEN_ERRNO_RTX
1893 rtx errno_rtx = GEN_ERRNO_RTX;
1896 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1898 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1904 /* Make sure the library call isn't expanded as a tail call. */
1905 CALL_EXPR_TAILCALL (exp) = 0;
1907 /* We can't set errno=EDOM directly; let the library call do it.
1908 Pop the arguments right away in case the call gets deleted. */
1910 expand_call (exp, target, 0);
1915 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1916 Return NULL_RTX if a normal call should be emitted rather than expanding
1917 the function in-line. EXP is the expression that is a call to the builtin
1918 function; if convenient, the result should be placed in TARGET.
1919 SUBTARGET may be used as the target for computing one of EXP's operands. */
1922 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1924 optab builtin_optab;
1926 tree fndecl = get_callee_fndecl (exp);
1927 enum machine_mode mode;
1928 bool errno_set = false;
1931 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1934 arg = CALL_EXPR_ARG (exp, 0);
1936 switch (DECL_FUNCTION_CODE (fndecl))
1938 CASE_FLT_FN (BUILT_IN_SQRT):
1939 errno_set = ! tree_expr_nonnegative_p (arg);
1940 builtin_optab = sqrt_optab;
1942 CASE_FLT_FN (BUILT_IN_EXP):
1943 errno_set = true; builtin_optab = exp_optab; break;
1944 CASE_FLT_FN (BUILT_IN_EXP10):
1945 CASE_FLT_FN (BUILT_IN_POW10):
1946 errno_set = true; builtin_optab = exp10_optab; break;
1947 CASE_FLT_FN (BUILT_IN_EXP2):
1948 errno_set = true; builtin_optab = exp2_optab; break;
1949 CASE_FLT_FN (BUILT_IN_EXPM1):
1950 errno_set = true; builtin_optab = expm1_optab; break;
1951 CASE_FLT_FN (BUILT_IN_LOGB):
1952 errno_set = true; builtin_optab = logb_optab; break;
1953 CASE_FLT_FN (BUILT_IN_LOG):
1954 errno_set = true; builtin_optab = log_optab; break;
1955 CASE_FLT_FN (BUILT_IN_LOG10):
1956 errno_set = true; builtin_optab = log10_optab; break;
1957 CASE_FLT_FN (BUILT_IN_LOG2):
1958 errno_set = true; builtin_optab = log2_optab; break;
1959 CASE_FLT_FN (BUILT_IN_LOG1P):
1960 errno_set = true; builtin_optab = log1p_optab; break;
1961 CASE_FLT_FN (BUILT_IN_ASIN):
1962 builtin_optab = asin_optab; break;
1963 CASE_FLT_FN (BUILT_IN_ACOS):
1964 builtin_optab = acos_optab; break;
1965 CASE_FLT_FN (BUILT_IN_TAN):
1966 builtin_optab = tan_optab; break;
1967 CASE_FLT_FN (BUILT_IN_ATAN):
1968 builtin_optab = atan_optab; break;
1969 CASE_FLT_FN (BUILT_IN_FLOOR):
1970 builtin_optab = floor_optab; break;
1971 CASE_FLT_FN (BUILT_IN_CEIL):
1972 builtin_optab = ceil_optab; break;
1973 CASE_FLT_FN (BUILT_IN_TRUNC):
1974 builtin_optab = btrunc_optab; break;
1975 CASE_FLT_FN (BUILT_IN_ROUND):
1976 builtin_optab = round_optab; break;
1977 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1978 builtin_optab = nearbyint_optab;
1979 if (flag_trapping_math)
1981 /* Else fallthrough and expand as rint. */
1982 CASE_FLT_FN (BUILT_IN_RINT):
1983 builtin_optab = rint_optab; break;
1984 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1985 builtin_optab = significand_optab; break;
1990 /* Make a suitable register to place result in. */
1991 mode = TYPE_MODE (TREE_TYPE (exp));
1993 if (! flag_errno_math || ! HONOR_NANS (mode))
1996 /* Before working hard, check whether the instruction is available. */
1997 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1999 target = gen_reg_rtx (mode);
2001 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2002 need to expand the argument again. This way, we will not perform
2003 side-effects more the once. */
2004 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2006 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2010 /* Compute into TARGET.
2011 Set TARGET to wherever the result comes back. */
2012 target = expand_unop (mode, builtin_optab, op0, target, 0);
2017 expand_errno_check (exp, target);
2019 /* Output the entire sequence. */
2020 insns = get_insns ();
2026 /* If we were unable to expand via the builtin, stop the sequence
2027 (without outputting the insns) and call to the library function
2028 with the stabilized argument list. */
2032 return expand_call (exp, target, target == const0_rtx);
2035 /* Expand a call to the builtin binary math functions (pow and atan2).
2036 Return NULL_RTX if a normal call should be emitted rather than expanding the
2037 function in-line. EXP is the expression that is a call to the builtin
2038 function; if convenient, the result should be placed in TARGET.
2039 SUBTARGET may be used as the target for computing one of EXP's
2043 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2045 optab builtin_optab;
2046 rtx op0, op1, insns;
2047 int op1_type = REAL_TYPE;
2048 tree fndecl = get_callee_fndecl (exp);
2050 enum machine_mode mode;
2051 bool errno_set = true;
2053 switch (DECL_FUNCTION_CODE (fndecl))
2055 CASE_FLT_FN (BUILT_IN_SCALBN):
2056 CASE_FLT_FN (BUILT_IN_SCALBLN):
2057 CASE_FLT_FN (BUILT_IN_LDEXP):
2058 op1_type = INTEGER_TYPE;
2063 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2066 arg0 = CALL_EXPR_ARG (exp, 0);
2067 arg1 = CALL_EXPR_ARG (exp, 1);
2069 switch (DECL_FUNCTION_CODE (fndecl))
2071 CASE_FLT_FN (BUILT_IN_POW):
2072 builtin_optab = pow_optab; break;
2073 CASE_FLT_FN (BUILT_IN_ATAN2):
2074 builtin_optab = atan2_optab; break;
2075 CASE_FLT_FN (BUILT_IN_SCALB):
2076 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2078 builtin_optab = scalb_optab; break;
2079 CASE_FLT_FN (BUILT_IN_SCALBN):
2080 CASE_FLT_FN (BUILT_IN_SCALBLN):
2081 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2083 /* Fall through... */
2084 CASE_FLT_FN (BUILT_IN_LDEXP):
2085 builtin_optab = ldexp_optab; break;
2086 CASE_FLT_FN (BUILT_IN_FMOD):
2087 builtin_optab = fmod_optab; break;
2088 CASE_FLT_FN (BUILT_IN_REMAINDER):
2089 CASE_FLT_FN (BUILT_IN_DREM):
2090 builtin_optab = remainder_optab; break;
2095 /* Make a suitable register to place result in. */
2096 mode = TYPE_MODE (TREE_TYPE (exp));
2098 /* Before working hard, check whether the instruction is available. */
2099 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2102 target = gen_reg_rtx (mode);
2104 if (! flag_errno_math || ! HONOR_NANS (mode))
2107 /* Always stabilize the argument list. */
2108 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2109 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2111 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2112 op1 = expand_normal (arg1);
2116 /* Compute into TARGET.
2117 Set TARGET to wherever the result comes back. */
2118 target = expand_binop (mode, builtin_optab, op0, op1,
2119 target, 0, OPTAB_DIRECT);
2121 /* If we were unable to expand via the builtin, stop the sequence
2122 (without outputting the insns) and call to the library function
2123 with the stabilized argument list. */
2127 return expand_call (exp, target, target == const0_rtx);
2131 expand_errno_check (exp, target);
2133 /* Output the entire sequence. */
2134 insns = get_insns ();
2141 /* Expand a call to the builtin sin and cos math functions.
2142 Return NULL_RTX if a normal call should be emitted rather than expanding the
2143 function in-line. EXP is the expression that is a call to the builtin
2144 function; if convenient, the result should be placed in TARGET.
2145 SUBTARGET may be used as the target for computing one of EXP's
2149 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2151 optab builtin_optab;
2153 tree fndecl = get_callee_fndecl (exp);
2154 enum machine_mode mode;
2157 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2160 arg = CALL_EXPR_ARG (exp, 0);
2162 switch (DECL_FUNCTION_CODE (fndecl))
2164 CASE_FLT_FN (BUILT_IN_SIN):
2165 CASE_FLT_FN (BUILT_IN_COS):
2166 builtin_optab = sincos_optab; break;
2171 /* Make a suitable register to place result in. */
2172 mode = TYPE_MODE (TREE_TYPE (exp));
2174 /* Check if sincos insn is available, otherwise fallback
2175 to sin or cos insn. */
2176 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2177 switch (DECL_FUNCTION_CODE (fndecl))
2179 CASE_FLT_FN (BUILT_IN_SIN):
2180 builtin_optab = sin_optab; break;
2181 CASE_FLT_FN (BUILT_IN_COS):
2182 builtin_optab = cos_optab; break;
2187 /* Before working hard, check whether the instruction is available. */
2188 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2190 target = gen_reg_rtx (mode);
2192 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2193 need to expand the argument again. This way, we will not perform
2194 side-effects more the once. */
2195 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2197 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2201 /* Compute into TARGET.
2202 Set TARGET to wherever the result comes back. */
2203 if (builtin_optab == sincos_optab)
2207 switch (DECL_FUNCTION_CODE (fndecl))
2209 CASE_FLT_FN (BUILT_IN_SIN):
2210 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2212 CASE_FLT_FN (BUILT_IN_COS):
2213 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2218 gcc_assert (result);
2222 target = expand_unop (mode, builtin_optab, op0, target, 0);
2227 /* Output the entire sequence. */
2228 insns = get_insns ();
2234 /* If we were unable to expand via the builtin, stop the sequence
2235 (without outputting the insns) and call to the library function
2236 with the stabilized argument list. */
2240 target = expand_call (exp, target, target == const0_rtx);
2245 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2246 return an RTL instruction code that implements the functionality.
2247 If that isn't possible or available return CODE_FOR_nothing. */
2249 static enum insn_code
2250 interclass_mathfn_icode (tree arg, tree fndecl)
2252 bool errno_set = false;
2253 optab builtin_optab = 0;
2254 enum machine_mode mode;
2256 switch (DECL_FUNCTION_CODE (fndecl))
2258 CASE_FLT_FN (BUILT_IN_ILOGB):
2259 errno_set = true; builtin_optab = ilogb_optab; break;
2260 CASE_FLT_FN (BUILT_IN_ISINF):
2261 builtin_optab = isinf_optab; break;
2262 case BUILT_IN_ISNORMAL:
2263 case BUILT_IN_ISFINITE:
2264 CASE_FLT_FN (BUILT_IN_FINITE):
2265 case BUILT_IN_FINITED32:
2266 case BUILT_IN_FINITED64:
2267 case BUILT_IN_FINITED128:
2268 case BUILT_IN_ISINFD32:
2269 case BUILT_IN_ISINFD64:
2270 case BUILT_IN_ISINFD128:
2271 /* These builtins have no optabs (yet). */
2277 /* There's no easy way to detect the case we need to set EDOM. */
2278 if (flag_errno_math && errno_set)
2279 return CODE_FOR_nothing;
2281 /* Optab mode depends on the mode of the input argument. */
2282 mode = TYPE_MODE (TREE_TYPE (arg));
2285 return optab_handler (builtin_optab, mode)->insn_code;
2286 return CODE_FOR_nothing;
2289 /* Expand a call to one of the builtin math functions that operate on
2290 floating point argument and output an integer result (ilogb, isinf,
2292 Return 0 if a normal call should be emitted rather than expanding the
2293 function in-line. EXP is the expression that is a call to the builtin
2294 function; if convenient, the result should be placed in TARGET.
2295 SUBTARGET may be used as the target for computing one of EXP's operands. */
2298 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2300 enum insn_code icode = CODE_FOR_nothing;
2302 tree fndecl = get_callee_fndecl (exp);
2303 enum machine_mode mode;
2306 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2309 arg = CALL_EXPR_ARG (exp, 0);
2310 icode = interclass_mathfn_icode (arg, fndecl);
2311 mode = TYPE_MODE (TREE_TYPE (arg));
2313 if (icode != CODE_FOR_nothing)
2315 rtx last = get_last_insn ();
2316 tree orig_arg = arg;
2317 /* Make a suitable register to place result in. */
2319 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
2320 || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
2321 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2323 gcc_assert (insn_data[icode].operand[0].predicate
2324 (target, GET_MODE (target)));
2326 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2327 need to expand the argument again. This way, we will not perform
2328 side-effects more the once. */
2329 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2331 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2333 if (mode != GET_MODE (op0))
2334 op0 = convert_to_mode (mode, op0, 0);
2336 /* Compute into TARGET.
2337 Set TARGET to wherever the result comes back. */
2338 if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
2340 delete_insns_since (last);
2341 CALL_EXPR_ARG (exp, 0) = orig_arg;
2347 /* Expand a call to the builtin sincos math function.
2348 Return NULL_RTX if a normal call should be emitted rather than expanding the
2349 function in-line. EXP is the expression that is a call to the builtin
2353 expand_builtin_sincos (tree exp)
2355 rtx op0, op1, op2, target1, target2;
2356 enum machine_mode mode;
2357 tree arg, sinp, cosp;
2359 location_t loc = EXPR_LOCATION (exp);
2361 if (!validate_arglist (exp, REAL_TYPE,
2362 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2365 arg = CALL_EXPR_ARG (exp, 0);
2366 sinp = CALL_EXPR_ARG (exp, 1);
2367 cosp = CALL_EXPR_ARG (exp, 2);
2369 /* Make a suitable register to place result in. */
2370 mode = TYPE_MODE (TREE_TYPE (arg));
2372 /* Check if sincos insn is available, otherwise emit the call. */
2373 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2376 target1 = gen_reg_rtx (mode);
2377 target2 = gen_reg_rtx (mode);
2379 op0 = expand_normal (arg);
2380 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2381 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2383 /* Compute into target1 and target2.
2384 Set TARGET to wherever the result comes back. */
2385 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2386 gcc_assert (result);
2388 /* Move target1 and target2 to the memory locations indicated
2390 emit_move_insn (op1, target1);
2391 emit_move_insn (op2, target2);
2396 /* Expand a call to the internal cexpi builtin to the sincos math function.
2397 EXP is the expression that is a call to the builtin function; if convenient,
2398 the result should be placed in TARGET. SUBTARGET may be used as the target
2399 for computing one of EXP's operands. */
2402 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2404 tree fndecl = get_callee_fndecl (exp);
2406 enum machine_mode mode;
2408 location_t loc = EXPR_LOCATION (exp);
2410 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2413 arg = CALL_EXPR_ARG (exp, 0);
2414 type = TREE_TYPE (arg);
2415 mode = TYPE_MODE (TREE_TYPE (arg));
2417 /* Try expanding via a sincos optab, fall back to emitting a libcall
2418 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2419 is only generated from sincos, cexp or if we have either of them. */
2420 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2422 op1 = gen_reg_rtx (mode);
2423 op2 = gen_reg_rtx (mode);
2425 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2427 /* Compute into op1 and op2. */
2428 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2430 else if (TARGET_HAS_SINCOS)
2432 tree call, fn = NULL_TREE;
2436 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2437 fn = built_in_decls[BUILT_IN_SINCOSF];
2438 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2439 fn = built_in_decls[BUILT_IN_SINCOS];
2440 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2441 fn = built_in_decls[BUILT_IN_SINCOSL];
2445 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2446 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2447 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2448 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2449 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2450 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2452 /* Make sure not to fold the sincos call again. */
2453 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2454 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2455 call, 3, arg, top1, top2));
2459 tree call, fn = NULL_TREE, narg;
2460 tree ctype = build_complex_type (type);
2462 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2463 fn = built_in_decls[BUILT_IN_CEXPF];
2464 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2465 fn = built_in_decls[BUILT_IN_CEXP];
2466 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2467 fn = built_in_decls[BUILT_IN_CEXPL];
2471 /* If we don't have a decl for cexp create one. This is the
2472 friendliest fallback if the user calls __builtin_cexpi
2473 without full target C99 function support. */
2474 if (fn == NULL_TREE)
2477 const char *name = NULL;
2479 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2481 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2483 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2486 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2487 fn = build_fn_decl (name, fntype);
2490 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2491 build_real (type, dconst0), arg);
2493 /* Make sure not to fold the cexp call again. */
2494 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2495 return expand_expr (build_call_nary (ctype, call, 1, narg),
2496 target, VOIDmode, EXPAND_NORMAL);
2499 /* Now build the proper return type. */
2500 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2501 make_tree (TREE_TYPE (arg), op2),
2502 make_tree (TREE_TYPE (arg), op1)),
2503 target, VOIDmode, EXPAND_NORMAL);
2506 /* Conveniently construct a function call expression. FNDECL names the
2507 function to be called, N is the number of arguments, and the "..."
2508 parameters are the argument expressions. Unlike build_call_exr
2509 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2512 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2515 tree fntype = TREE_TYPE (fndecl);
2516 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2519 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2521 SET_EXPR_LOCATION (fn, loc);
2524 #define build_call_nofold(...) \
2525 build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
2527 /* Expand a call to one of the builtin rounding functions gcc defines
2528 as an extension (lfloor and lceil). As these are gcc extensions we
2529 do not need to worry about setting errno to EDOM.
2530 If expanding via optab fails, lower expression to (int)(floor(x)).
2531 EXP is the expression that is a call to the builtin function;
2532 if convenient, the result should be placed in TARGET. */
2535 expand_builtin_int_roundingfn (tree exp, rtx target)
2537 convert_optab builtin_optab;
2538 rtx op0, insns, tmp;
2539 tree fndecl = get_callee_fndecl (exp);
2540 enum built_in_function fallback_fn;
2541 tree fallback_fndecl;
2542 enum machine_mode mode;
2545 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2548 arg = CALL_EXPR_ARG (exp, 0);
2550 switch (DECL_FUNCTION_CODE (fndecl))
2552 CASE_FLT_FN (BUILT_IN_LCEIL):
2553 CASE_FLT_FN (BUILT_IN_LLCEIL):
2554 builtin_optab = lceil_optab;
2555 fallback_fn = BUILT_IN_CEIL;
2558 CASE_FLT_FN (BUILT_IN_LFLOOR):
2559 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2560 builtin_optab = lfloor_optab;
2561 fallback_fn = BUILT_IN_FLOOR;
2568 /* Make a suitable register to place result in. */
2569 mode = TYPE_MODE (TREE_TYPE (exp));
2571 target = gen_reg_rtx (mode);
2573 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2574 need to expand the argument again. This way, we will not perform
2575 side-effects more the once. */
2576 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2578 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2582 /* Compute into TARGET. */
2583 if (expand_sfix_optab (target, op0, builtin_optab))
2585 /* Output the entire sequence. */
2586 insns = get_insns ();
2592 /* If we were unable to expand via the builtin, stop the sequence
2593 (without outputting the insns). */
2596 /* Fall back to floating point rounding optab. */
2597 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2599 /* For non-C99 targets we may end up without a fallback fndecl here
2600 if the user called __builtin_lfloor directly. In this case emit
2601 a call to the floor/ceil variants nevertheless. This should result
2602 in the best user experience for not full C99 targets. */
2603 if (fallback_fndecl == NULL_TREE)
2606 const char *name = NULL;
2608 switch (DECL_FUNCTION_CODE (fndecl))
2610 case BUILT_IN_LCEIL:
2611 case BUILT_IN_LLCEIL:
2614 case BUILT_IN_LCEILF:
2615 case BUILT_IN_LLCEILF:
2618 case BUILT_IN_LCEILL:
2619 case BUILT_IN_LLCEILL:
2622 case BUILT_IN_LFLOOR:
2623 case BUILT_IN_LLFLOOR:
2626 case BUILT_IN_LFLOORF:
2627 case BUILT_IN_LLFLOORF:
2630 case BUILT_IN_LFLOORL:
2631 case BUILT_IN_LLFLOORL:
2638 fntype = build_function_type_list (TREE_TYPE (arg),
2639 TREE_TYPE (arg), NULL_TREE);
2640 fallback_fndecl = build_fn_decl (name, fntype);
2643 exp = build_call_nofold (fallback_fndecl, 1, arg);
2645 tmp = expand_normal (exp);
2647 /* Truncate the result of floating point optab to integer
2648 via expand_fix (). */
2649 target = gen_reg_rtx (mode);
2650 expand_fix (target, tmp, 0);
2655 /* Expand a call to one of the builtin math functions doing integer
2657 Return 0 if a normal call should be emitted rather than expanding the
2658 function in-line. EXP is the expression that is a call to the builtin
2659 function; if convenient, the result should be placed in TARGET. */
2662 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2664 convert_optab builtin_optab;
2666 tree fndecl = get_callee_fndecl (exp);
2668 enum machine_mode mode;
2670 /* There's no easy way to detect the case we need to set EDOM. */
2671 if (flag_errno_math)
2674 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2677 arg = CALL_EXPR_ARG (exp, 0);
2679 switch (DECL_FUNCTION_CODE (fndecl))
2681 CASE_FLT_FN (BUILT_IN_LRINT):
2682 CASE_FLT_FN (BUILT_IN_LLRINT):
2683 builtin_optab = lrint_optab; break;
2684 CASE_FLT_FN (BUILT_IN_LROUND):
2685 CASE_FLT_FN (BUILT_IN_LLROUND):
2686 builtin_optab = lround_optab; break;
2691 /* Make a suitable register to place result in. */
2692 mode = TYPE_MODE (TREE_TYPE (exp));
2694 target = gen_reg_rtx (mode);
2696 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2697 need to expand the argument again. This way, we will not perform
2698 side-effects more the once. */
2699 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2701 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2705 if (expand_sfix_optab (target, op0, builtin_optab))
2707 /* Output the entire sequence. */
2708 insns = get_insns ();
2714 /* If we were unable to expand via the builtin, stop the sequence
2715 (without outputting the insns) and call to the library function
2716 with the stabilized argument list. */
2719 target = expand_call (exp, target, target == const0_rtx);
2724 /* To evaluate powi(x,n), the floating point value x raised to the
2725 constant integer exponent n, we use a hybrid algorithm that
2726 combines the "window method" with look-up tables. For an
2727 introduction to exponentiation algorithms and "addition chains",
2728 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2729 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2730 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2731 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2733 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2734 multiplications to inline before calling the system library's pow
2735 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2736 so this default never requires calling pow, powf or powl. */
2738 #ifndef POWI_MAX_MULTS
2739 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2742 /* The size of the "optimal power tree" lookup table. All
2743 exponents less than this value are simply looked up in the
2744 powi_table below. This threshold is also used to size the
2745 cache of pseudo registers that hold intermediate results. */
2746 #define POWI_TABLE_SIZE 256
2748 /* The size, in bits of the window, used in the "window method"
2749 exponentiation algorithm. This is equivalent to a radix of
2750 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2751 #define POWI_WINDOW_SIZE 3
2753 /* The following table is an efficient representation of an
2754 "optimal power tree". For each value, i, the corresponding
2755 value, j, in the table states than an optimal evaluation
2756 sequence for calculating pow(x,i) can be found by evaluating
2757 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2758 100 integers is given in Knuth's "Seminumerical algorithms". */
2760 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2762 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2763 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2764 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2765 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2766 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2767 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2768 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2769 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2770 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2771 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2772 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2773 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2774 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2775 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2776 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2777 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2778 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2779 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2780 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2781 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2782 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2783 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2784 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2785 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2786 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2787 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2788 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2789 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2790 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2791 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2792 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2793 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2797 /* Return the number of multiplications required to calculate
2798 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2799 subroutine of powi_cost. CACHE is an array indicating
2800 which exponents have already been calculated. */
2803 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2805 /* If we've already calculated this exponent, then this evaluation
2806 doesn't require any additional multiplications. */
2811 return powi_lookup_cost (n - powi_table[n], cache)
2812 + powi_lookup_cost (powi_table[n], cache) + 1;
2815 /* Return the number of multiplications required to calculate
2816 powi(x,n) for an arbitrary x, given the exponent N. This
2817 function needs to be kept in sync with expand_powi below. */
2820 powi_cost (HOST_WIDE_INT n)
2822 bool cache[POWI_TABLE_SIZE];
2823 unsigned HOST_WIDE_INT digit;
2824 unsigned HOST_WIDE_INT val;
2830 /* Ignore the reciprocal when calculating the cost. */
2831 val = (n < 0) ? -n : n;
2833 /* Initialize the exponent cache. */
2834 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2839 while (val >= POWI_TABLE_SIZE)
2843 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2844 result += powi_lookup_cost (digit, cache)
2845 + POWI_WINDOW_SIZE + 1;
2846 val >>= POWI_WINDOW_SIZE;
2855 return result + powi_lookup_cost (val, cache);
2858 /* Recursive subroutine of expand_powi. This function takes the array,
2859 CACHE, of already calculated exponents and an exponent N and returns
2860 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2863 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2865 unsigned HOST_WIDE_INT digit;
2869 if (n < POWI_TABLE_SIZE)
2874 target = gen_reg_rtx (mode);
2877 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2878 op1 = expand_powi_1 (mode, powi_table[n], cache);
2882 target = gen_reg_rtx (mode);
2883 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2884 op0 = expand_powi_1 (mode, n - digit, cache);
2885 op1 = expand_powi_1 (mode, digit, cache);
2889 target = gen_reg_rtx (mode);
2890 op0 = expand_powi_1 (mode, n >> 1, cache);
2894 result = expand_mult (mode, op0, op1, target, 0);
2895 if (result != target)
2896 emit_move_insn (target, result);
2900 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2901 floating point operand in mode MODE, and N is the exponent. This
2902 function needs to be kept in sync with powi_cost above. */
2905 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2907 rtx cache[POWI_TABLE_SIZE];
2911 return CONST1_RTX (mode);
2913 memset (cache, 0, sizeof (cache));
2916 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2918 /* If the original exponent was negative, reciprocate the result. */
2920 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2921 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2926 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
2927 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
2928 if we can simplify it. */
2930 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
2933 if (TREE_CODE (arg1) == REAL_CST
2934 && !TREE_OVERFLOW (arg1)
2935 && flag_unsafe_math_optimizations)
2937 enum machine_mode mode = TYPE_MODE (type);
2938 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
2939 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
2940 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
2941 tree op = NULL_TREE;
2945 /* Optimize pow (x, 0.5) into sqrt. */
2946 if (REAL_VALUES_EQUAL (c, dconsthalf))
2947 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
2951 REAL_VALUE_TYPE dconst1_4 = dconst1;
2952 REAL_VALUE_TYPE dconst3_4;
2953 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
2955 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
2956 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
2958 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
2959 machines that a builtin sqrt instruction is smaller than a
2960 call to pow with 0.25, so do this optimization even if
2962 if (REAL_VALUES_EQUAL (c, dconst1_4))
2964 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
2965 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
2968 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
2969 are optimizing for space. */
2970 else if (optimize_insn_for_speed_p ()
2971 && !TREE_SIDE_EFFECTS (arg0)
2972 && REAL_VALUES_EQUAL (c, dconst3_4))
2974 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
2975 tree sqrt2 = builtin_save_expr (sqrt1);
2976 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
2977 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
2982 /* Check whether we can do cbrt insstead of pow (x, 1./3.) and
2983 cbrt/sqrts instead of pow (x, 1./6.). */
2985 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
2987 /* First try 1/3. */
2988 REAL_VALUE_TYPE dconst1_3
2989 = real_value_truncate (mode, dconst_third ());
2991 if (REAL_VALUES_EQUAL (c, dconst1_3))
2992 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
2995 else if (optimize_insn_for_speed_p ())
2997 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
2998 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
3000 if (REAL_VALUES_EQUAL (c, dconst1_6))
3002 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3003 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3009 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
3015 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
3016 a normal call should be emitted rather than expanding the function
3017 in-line. EXP is the expression that is a call to the builtin
3018 function; if convenient, the result should be placed in TARGET. */
3021 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
3025 tree type = TREE_TYPE (exp);
3026 REAL_VALUE_TYPE cint, c, c2;
3029 enum machine_mode mode = TYPE_MODE (type);
3031 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
3034 arg0 = CALL_EXPR_ARG (exp, 0);
3035 arg1 = CALL_EXPR_ARG (exp, 1);
3037 if (TREE_CODE (arg1) != REAL_CST
3038 || TREE_OVERFLOW (arg1))
3039 return expand_builtin_mathfn_2 (exp, target, subtarget);
3041 /* Handle constant exponents. */
3043 /* For integer valued exponents we can expand to an optimal multiplication
3044 sequence using expand_powi. */
3045 c = TREE_REAL_CST (arg1);
3046 n = real_to_integer (&c);
3047 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3048 if (real_identical (&c, &cint)
3049 && ((n >= -1 && n <= 2)
3050 || (flag_unsafe_math_optimizations
3051 && optimize_insn_for_speed_p ()
3052 && powi_cost (n) <= POWI_MAX_MULTS)))
3054 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3057 op = force_reg (mode, op);
3058 op = expand_powi (op, mode, n);
3063 narg0 = builtin_save_expr (arg0);
3065 /* If the exponent is not integer valued, check if it is half of an integer.
3066 In this case we can expand to sqrt (x) * x**(n/2). */
3067 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3068 if (fn != NULL_TREE)
3070 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3071 n = real_to_integer (&c2);
3072 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3073 if (real_identical (&c2, &cint)
3074 && ((flag_unsafe_math_optimizations
3075 && optimize_insn_for_speed_p ()
3076 && powi_cost (n/2) <= POWI_MAX_MULTS)
3077 /* Even the c == 0.5 case cannot be done unconditionally
3078 when we need to preserve signed zeros, as
3079 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
3080 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
3081 /* For c == 1.5 we can assume that x * sqrt (x) is always
3082 smaller than pow (x, 1.5) if sqrt will not be expanded
3085 && (optab_handler (sqrt_optab, mode)->insn_code
3086 != CODE_FOR_nothing))))
3088 tree call_expr = build_call_nofold (fn, 1, narg0);
3089 /* Use expand_expr in case the newly built call expression
3090 was folded to a non-call. */
3091 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3094 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3095 op2 = force_reg (mode, op2);
3096 op2 = expand_powi (op2, mode, abs (n / 2));
3097 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3098 0, OPTAB_LIB_WIDEN);
3099 /* If the original exponent was negative, reciprocate the
3102 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3103 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3109 /* Check whether we can do a series of sqrt or cbrt's instead of the pow
3111 op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
3116 /* Try if the exponent is a third of an integer. In this case
3117 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3118 different from pow (x, 1./3.) due to rounding and behavior
3119 with negative x we need to constrain this transformation to
3120 unsafe math and positive x or finite math. */
3121 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3123 && flag_unsafe_math_optimizations
3124 && (tree_expr_nonnegative_p (arg0)
3125 || !HONOR_NANS (mode)))
3127 REAL_VALUE_TYPE dconst3;
3128 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3129 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3130 real_round (&c2, mode, &c2);
3131 n = real_to_integer (&c2);
3132 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3133 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3134 real_convert (&c2, mode, &c2);
3135 if (real_identical (&c2, &c)
3136 && ((optimize_insn_for_speed_p ()
3137 && powi_cost (n/3) <= POWI_MAX_MULTS)
3140 tree call_expr = build_call_nofold (fn, 1,narg0);
3141 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3142 if (abs (n) % 3 == 2)
3143 op = expand_simple_binop (mode, MULT, op, op, op,
3144 0, OPTAB_LIB_WIDEN);
3147 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3148 op2 = force_reg (mode, op2);
3149 op2 = expand_powi (op2, mode, abs (n / 3));
3150 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3151 0, OPTAB_LIB_WIDEN);
3152 /* If the original exponent was negative, reciprocate the
3155 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3156 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3162 /* Fall back to optab expansion. */
3163 return expand_builtin_mathfn_2 (exp, target, subtarget);
3166 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3167 a normal call should be emitted rather than expanding the function
3168 in-line. EXP is the expression that is a call to the builtin
3169 function; if convenient, the result should be placed in TARGET. */
3172 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3176 enum machine_mode mode;
3177 enum machine_mode mode2;
3179 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3182 arg0 = CALL_EXPR_ARG (exp, 0);
3183 arg1 = CALL_EXPR_ARG (exp, 1);
3184 mode = TYPE_MODE (TREE_TYPE (exp));
3186 /* Handle constant power. */
3188 if (TREE_CODE (arg1) == INTEGER_CST
3189 && !TREE_OVERFLOW (arg1))
3191 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3193 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3194 Otherwise, check the number of multiplications required. */
3195 if ((TREE_INT_CST_HIGH (arg1) == 0
3196 || TREE_INT_CST_HIGH (arg1) == -1)
3197 && ((n >= -1 && n <= 2)
3198 || (optimize_insn_for_speed_p ()
3199 && powi_cost (n) <= POWI_MAX_MULTS)))
3201 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3202 op0 = force_reg (mode, op0);
3203 return expand_powi (op0, mode, n);
3207 /* Emit a libcall to libgcc. */
3209 /* Mode of the 2nd argument must match that of an int. */
3210 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3212 if (target == NULL_RTX)
3213 target = gen_reg_rtx (mode);
3215 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3216 if (GET_MODE (op0) != mode)
3217 op0 = convert_to_mode (mode, op0, 0);
3218 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3219 if (GET_MODE (op1) != mode2)
3220 op1 = convert_to_mode (mode2, op1, 0);
3222 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3223 target, LCT_CONST, mode, 2,
3224 op0, mode, op1, mode2);
3229 /* Expand expression EXP which is a call to the strlen builtin. Return
3230 NULL_RTX if we failed the caller should emit a normal call, otherwise
3231 try to get the result in TARGET, if convenient. */
3234 expand_builtin_strlen (tree exp, rtx target,
3235 enum machine_mode target_mode)
3237 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3243 tree src = CALL_EXPR_ARG (exp, 0);
3244 rtx result, src_reg, char_rtx, before_strlen;
3245 enum machine_mode insn_mode = target_mode, char_mode;
3246 enum insn_code icode = CODE_FOR_nothing;
3249 /* If the length can be computed at compile-time, return it. */
3250 len = c_strlen (src, 0);
3252 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3254 /* If the length can be computed at compile-time and is constant
3255 integer, but there are side-effects in src, evaluate
3256 src for side-effects, then return len.
3257 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3258 can be optimized into: i++; x = 3; */
3259 len = c_strlen (src, 1);
3260 if (len && TREE_CODE (len) == INTEGER_CST)
3262 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3263 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3266 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3268 /* If SRC is not a pointer type, don't do this operation inline. */
3272 /* Bail out if we can't compute strlen in the right mode. */
3273 while (insn_mode != VOIDmode)
3275 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3276 if (icode != CODE_FOR_nothing)
3279 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3281 if (insn_mode == VOIDmode)
3284 /* Make a place to write the result of the instruction. */
3288 && GET_MODE (result) == insn_mode
3289 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3290 result = gen_reg_rtx (insn_mode);
3292 /* Make a place to hold the source address. We will not expand
3293 the actual source until we are sure that the expansion will
3294 not fail -- there are trees that cannot be expanded twice. */
3295 src_reg = gen_reg_rtx (Pmode);
3297 /* Mark the beginning of the strlen sequence so we can emit the
3298 source operand later. */
3299 before_strlen = get_last_insn ();
3301 char_rtx = const0_rtx;
3302 char_mode = insn_data[(int) icode].operand[2].mode;
3303 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3305 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3307 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3308 char_rtx, GEN_INT (align));
3313 /* Now that we are assured of success, expand the source. */
3315 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3317 emit_move_insn (src_reg, pat);
3322 emit_insn_after (pat, before_strlen);
3324 emit_insn_before (pat, get_insns ());
3326 /* Return the value in the proper mode for this function. */
3327 if (GET_MODE (result) == target_mode)
3329 else if (target != 0)
3330 convert_move (target, result, 0);
3332 target = convert_to_mode (target_mode, result, 0);
3338 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3339 bytes from constant string DATA + OFFSET and return it as target
3343 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3344 enum machine_mode mode)
3346 const char *str = (const char *) data;
3348 gcc_assert (offset >= 0
3349 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3350 <= strlen (str) + 1));
3352 return c_readstr (str + offset, mode);
3355 /* Expand a call EXP to the memcpy builtin.
3356 Return NULL_RTX if we failed, the caller should emit a normal call,
3357 otherwise try to get the result in TARGET, if convenient (and in
3358 mode MODE if that's convenient). */
3361 expand_builtin_memcpy (tree exp, rtx target)
3363 if (!validate_arglist (exp,
3364 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3368 tree dest = CALL_EXPR_ARG (exp, 0);
3369 tree src = CALL_EXPR_ARG (exp, 1);
3370 tree len = CALL_EXPR_ARG (exp, 2);
3371 const char *src_str;
3372 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3373 unsigned int dest_align
3374 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3375 rtx dest_mem, src_mem, dest_addr, len_rtx;
3376 HOST_WIDE_INT expected_size = -1;
3377 unsigned int expected_align = 0;
3379 /* If DEST is not a pointer type, call the normal function. */
3380 if (dest_align == 0)
3383 /* If either SRC is not a pointer type, don't do this
3384 operation in-line. */
3388 if (currently_expanding_gimple_stmt)
3389 stringop_block_profile (currently_expanding_gimple_stmt,
3390 &expected_align, &expected_size);
3392 if (expected_align < dest_align)
3393 expected_align = dest_align;
3394 dest_mem = get_memory_rtx (dest, len);
3395 set_mem_align (dest_mem, dest_align);
3396 len_rtx = expand_normal (len);
3397 src_str = c_getstr (src);
3399 /* If SRC is a string constant and block move would be done
3400 by pieces, we can avoid loading the string from memory
3401 and only stored the computed constants. */
3403 && CONST_INT_P (len_rtx)
3404 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3405 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3406 CONST_CAST (char *, src_str),
3409 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3410 builtin_memcpy_read_str,
3411 CONST_CAST (char *, src_str),
3412 dest_align, false, 0);
3413 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3414 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3418 src_mem = get_memory_rtx (src, len);
3419 set_mem_align (src_mem, src_align);
3421 /* Copy word part most expediently. */
3422 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3423 CALL_EXPR_TAILCALL (exp)
3424 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3425 expected_align, expected_size);
3429 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3430 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3436 /* Expand a call EXP to the mempcpy builtin.
3437 Return NULL_RTX if we failed; the caller should emit a normal call,
3438 otherwise try to get the result in TARGET, if convenient (and in
3439 mode MODE if that's convenient). If ENDP is 0 return the
3440 destination pointer, if ENDP is 1 return the end pointer ala
3441 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3445 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3447 if (!validate_arglist (exp,
3448 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3452 tree dest = CALL_EXPR_ARG (exp, 0);
3453 tree src = CALL_EXPR_ARG (exp, 1);
3454 tree len = CALL_EXPR_ARG (exp, 2);
3455 return expand_builtin_mempcpy_args (dest, src, len,
3456 target, mode, /*endp=*/ 1);
3460 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3461 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3462 so that this can also be called without constructing an actual CALL_EXPR.
3463 The other arguments and return value are the same as for
3464 expand_builtin_mempcpy. */
3467 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3468 rtx target, enum machine_mode mode, int endp)
3470 /* If return value is ignored, transform mempcpy into memcpy. */
3471 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3473 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3474 tree result = build_call_nofold (fn, 3, dest, src, len);
3475 return expand_expr (result, target, mode, EXPAND_NORMAL);
3479 const char *src_str;
3480 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3481 unsigned int dest_align
3482 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3483 rtx dest_mem, src_mem, len_rtx;
3485 /* If either SRC or DEST is not a pointer type, don't do this
3486 operation in-line. */
3487 if (dest_align == 0 || src_align == 0)
3490 /* If LEN is not constant, call the normal function. */
3491 if (! host_integerp (len, 1))
3494 len_rtx = expand_normal (len);
3495 src_str = c_getstr (src);
3497 /* If SRC is a string constant and block move would be done
3498 by pieces, we can avoid loading the string from memory
3499 and only stored the computed constants. */
3501 && CONST_INT_P (len_rtx)
3502 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3503 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3504 CONST_CAST (char *, src_str),
3507 dest_mem = get_memory_rtx (dest, len);
3508 set_mem_align (dest_mem, dest_align);
3509 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3510 builtin_memcpy_read_str,
3511 CONST_CAST (char *, src_str),
3512 dest_align, false, endp);
3513 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3514 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3518 if (CONST_INT_P (len_rtx)
3519 && can_move_by_pieces (INTVAL (len_rtx),
3520 MIN (dest_align, src_align)))
3522 dest_mem = get_memory_rtx (dest, len);
3523 set_mem_align (dest_mem, dest_align);
3524 src_mem = get_memory_rtx (src, len);
3525 set_mem_align (src_mem, src_align);
3526 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3527 MIN (dest_align, src_align), endp);
3528 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3529 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3538 # define HAVE_movstr 0
3539 # define CODE_FOR_movstr CODE_FOR_nothing
3542 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3543 we failed, the caller should emit a normal call, otherwise try to
3544 get the result in TARGET, if convenient. If ENDP is 0 return the
3545 destination pointer, if ENDP is 1 return the end pointer ala
3546 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3550 expand_movstr (tree dest, tree src, rtx target, int endp)
3556 const struct insn_data * data;
3561 dest_mem = get_memory_rtx (dest, NULL);
3562 src_mem = get_memory_rtx (src, NULL);
3565 target = force_reg (Pmode, XEXP (dest_mem, 0));
3566 dest_mem = replace_equiv_address (dest_mem, target);
3567 end = gen_reg_rtx (Pmode);
3571 if (target == 0 || target == const0_rtx)
3573 end = gen_reg_rtx (Pmode);
3581 data = insn_data + CODE_FOR_movstr;
3583 if (data->operand[0].mode != VOIDmode)
3584 end = gen_lowpart (data->operand[0].mode, end);
3586 insn = data->genfun (end, dest_mem, src_mem);
3592 /* movstr is supposed to set end to the address of the NUL
3593 terminator. If the caller requested a mempcpy-like return value,
3595 if (endp == 1 && target != const0_rtx)
3597 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3598 emit_move_insn (target, force_operand (tem, NULL_RTX));
3604 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3605 NULL_RTX if we failed the caller should emit a normal call, otherwise
3606 try to get the result in TARGET, if convenient (and in mode MODE if that's
3610 expand_builtin_strcpy (tree exp, rtx target)
3612 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3614 tree dest = CALL_EXPR_ARG (exp, 0);
3615 tree src = CALL_EXPR_ARG (exp, 1);
3616 return expand_builtin_strcpy_args (dest, src, target);
3621 /* Helper function to do the actual work for expand_builtin_strcpy. The
3622 arguments to the builtin_strcpy call DEST and SRC are broken out
3623 so that this can also be called without constructing an actual CALL_EXPR.
3624 The other arguments and return value are the same as for
3625 expand_builtin_strcpy. */
3628 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3630 return expand_movstr (dest, src, target, /*endp=*/0);
3633 /* Expand a call EXP to the stpcpy builtin.
3634 Return NULL_RTX if we failed the caller should emit a normal call,
3635 otherwise try to get the result in TARGET, if convenient (and in
3636 mode MODE if that's convenient). */
3639 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3642 location_t loc = EXPR_LOCATION (exp);
3644 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3647 dst = CALL_EXPR_ARG (exp, 0);
3648 src = CALL_EXPR_ARG (exp, 1);
3650 /* If return value is ignored, transform stpcpy into strcpy. */
3651 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3653 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3654 tree result = build_call_nofold (fn, 2, dst, src);
3655 return expand_expr (result, target, mode, EXPAND_NORMAL);
3662 /* Ensure we get an actual string whose length can be evaluated at
3663 compile-time, not an expression containing a string. This is
3664 because the latter will potentially produce pessimized code
3665 when used to produce the return value. */
3666 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3667 return expand_movstr (dst, src, target, /*endp=*/2);
3669 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3670 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3671 target, mode, /*endp=*/2);
3676 if (TREE_CODE (len) == INTEGER_CST)
3678 rtx len_rtx = expand_normal (len);
3680 if (CONST_INT_P (len_rtx))
3682 ret = expand_builtin_strcpy_args (dst, src, target);
3688 if (mode != VOIDmode)
3689 target = gen_reg_rtx (mode);
3691 target = gen_reg_rtx (GET_MODE (ret));
3693 if (GET_MODE (target) != GET_MODE (ret))
3694 ret = gen_lowpart (GET_MODE (target), ret);
3696 ret = plus_constant (ret, INTVAL (len_rtx));
3697 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3705 return expand_movstr (dst, src, target, /*endp=*/2);
3709 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3710 bytes from constant string DATA + OFFSET and return it as target
3714 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3715 enum machine_mode mode)
3717 const char *str = (const char *) data;
3719 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3722 return c_readstr (str + offset, mode);
3725 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3726 NULL_RTX if we failed the caller should emit a normal call. */
3729 expand_builtin_strncpy (tree exp, rtx target)
3731 location_t loc = EXPR_LOCATION (exp);
3733 if (validate_arglist (exp,
3734 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3736 tree dest = CALL_EXPR_ARG (exp, 0);
3737 tree src = CALL_EXPR_ARG (exp, 1);
3738 tree len = CALL_EXPR_ARG (exp, 2);
3739 tree slen = c_strlen (src, 1);
3741 /* We must be passed a constant len and src parameter. */
3742 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3745 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3747 /* We're required to pad with trailing zeros if the requested
3748 len is greater than strlen(s2)+1. In that case try to
3749 use store_by_pieces, if it fails, punt. */
3750 if (tree_int_cst_lt (slen, len))
3752 unsigned int dest_align
3753 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3754 const char *p = c_getstr (src);
3757 if (!p || dest_align == 0 || !host_integerp (len, 1)
3758 || !can_store_by_pieces (tree_low_cst (len, 1),
3759 builtin_strncpy_read_str,
3760 CONST_CAST (char *, p),
3764 dest_mem = get_memory_rtx (dest, len);
3765 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3766 builtin_strncpy_read_str,
3767 CONST_CAST (char *, p), dest_align, false, 0);
3768 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3769 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3776 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3777 bytes from constant string DATA + OFFSET and return it as target
3781 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3782 enum machine_mode mode)
3784 const char *c = (const char *) data;
3785 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3787 memset (p, *c, GET_MODE_SIZE (mode));
3789 return c_readstr (p, mode);
3792 /* Callback routine for store_by_pieces. Return the RTL of a register
3793 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3794 char value given in the RTL register data. For example, if mode is
3795 4 bytes wide, return the RTL for 0x01010101*data. */
3798 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3799 enum machine_mode mode)
3805 size = GET_MODE_SIZE (mode);
3809 p = XALLOCAVEC (char, size);
3810 memset (p, 1, size);
3811 coeff = c_readstr (p, mode);
3813 target = convert_to_mode (mode, (rtx) data, 1);
3814 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3815 return force_reg (mode, target);
3818 /* Expand expression EXP, which is a call to the memset builtin. Return
3819 NULL_RTX if we failed the caller should emit a normal call, otherwise
3820 try to get the result in TARGET, if convenient (and in mode MODE if that's
3824 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3826 if (!validate_arglist (exp,
3827 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3831 tree dest = CALL_EXPR_ARG (exp, 0);
3832 tree val = CALL_EXPR_ARG (exp, 1);
3833 tree len = CALL_EXPR_ARG (exp, 2);
3834 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3838 /* Helper function to do the actual work for expand_builtin_memset. The
3839 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3840 so that this can also be called without constructing an actual CALL_EXPR.
3841 The other arguments and return value are the same as for
3842 expand_builtin_memset. */
3845 expand_builtin_memset_args (tree dest, tree val, tree len,
3846 rtx target, enum machine_mode mode, tree orig_exp)
3849 enum built_in_function fcode;
3851 unsigned int dest_align;
3852 rtx dest_mem, dest_addr, len_rtx;
3853 HOST_WIDE_INT expected_size = -1;
3854 unsigned int expected_align = 0;
3856 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3858 /* If DEST is not a pointer type, don't do this operation in-line. */
3859 if (dest_align == 0)
3862 if (currently_expanding_gimple_stmt)
3863 stringop_block_profile (currently_expanding_gimple_stmt,
3864 &expected_align, &expected_size);
3866 if (expected_align < dest_align)
3867 expected_align = dest_align;
3869 /* If the LEN parameter is zero, return DEST. */
3870 if (integer_zerop (len))
3872 /* Evaluate and ignore VAL in case it has side-effects. */
3873 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3874 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3877 /* Stabilize the arguments in case we fail. */
3878 dest = builtin_save_expr (dest);
3879 val = builtin_save_expr (val);
3880 len = builtin_save_expr (len);
3882 len_rtx = expand_normal (len);
3883 dest_mem = get_memory_rtx (dest, len);
3885 if (TREE_CODE (val) != INTEGER_CST)
3889 val_rtx = expand_normal (val);
3890 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3893 /* Assume that we can memset by pieces if we can store
3894 * the coefficients by pieces (in the required modes).
3895 * We can't pass builtin_memset_gen_str as that emits RTL. */
3897 if (host_integerp (len, 1)
3898 && can_store_by_pieces (tree_low_cst (len, 1),
3899 builtin_memset_read_str, &c, dest_align,
3902 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3904 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3905 builtin_memset_gen_str, val_rtx, dest_align,
3908 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3909 dest_align, expected_align,
3913 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3914 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3918 if (target_char_cast (val, &c))
3923 if (host_integerp (len, 1)
3924 && can_store_by_pieces (tree_low_cst (len, 1),
3925 builtin_memset_read_str, &c, dest_align,
3927 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3928 builtin_memset_read_str, &c, dest_align, true, 0);
3929 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3930 dest_align, expected_align,
3934 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3935 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3939 set_mem_align (dest_mem, dest_align);
3940 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3941 CALL_EXPR_TAILCALL (orig_exp)
3942 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3943 expected_align, expected_size);
3947 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3948 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3954 fndecl = get_callee_fndecl (orig_exp);
3955 fcode = DECL_FUNCTION_CODE (fndecl);
3956 if (fcode == BUILT_IN_MEMSET)
3957 fn = build_call_nofold (fndecl, 3, dest, val, len);
3958 else if (fcode == BUILT_IN_BZERO)
3959 fn = build_call_nofold (fndecl, 2, dest, len);
3962 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3963 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3964 return expand_call (fn, target, target == const0_rtx);
3967 /* Expand expression EXP, which is a call to the bzero builtin. Return
3968 NULL_RTX if we failed the caller should emit a normal call. */
3971 expand_builtin_bzero (tree exp)
3974 location_t loc = EXPR_LOCATION (exp);
3976 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3979 dest = CALL_EXPR_ARG (exp, 0);
3980 size = CALL_EXPR_ARG (exp, 1);
3982 /* New argument list transforming bzero(ptr x, int y) to
3983 memset(ptr x, int 0, size_t y). This is done this way
3984 so that if it isn't expanded inline, we fallback to
3985 calling bzero instead of memset. */
3987 return expand_builtin_memset_args (dest, integer_zero_node,
3988 fold_convert_loc (loc, sizetype, size),
3989 const0_rtx, VOIDmode, exp);
3992 /* Expand expression EXP, which is a call to the memcmp built-in function.
3993 Return NULL_RTX if we failed and the
3994 caller should emit a normal call, otherwise try to get the result in
3995 TARGET, if convenient (and in mode MODE, if that's convenient). */
3998 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3999 ATTRIBUTE_UNUSED enum machine_mode mode)
4001 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4003 if (!validate_arglist (exp,
4004 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4007 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4009 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4012 tree arg1 = CALL_EXPR_ARG (exp, 0);
4013 tree arg2 = CALL_EXPR_ARG (exp, 1);
4014 tree len = CALL_EXPR_ARG (exp, 2);
4017 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4019 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4020 enum machine_mode insn_mode;
4022 #ifdef HAVE_cmpmemsi
4024 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4027 #ifdef HAVE_cmpstrnsi
4029 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4034 /* If we don't have POINTER_TYPE, call the function. */
4035 if (arg1_align == 0 || arg2_align == 0)
4038 /* Make a place to write the result of the instruction. */
4041 && REG_P (result) && GET_MODE (result) == insn_mode
4042 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4043 result = gen_reg_rtx (insn_mode);
4045 arg1_rtx = get_memory_rtx (arg1, len);
4046 arg2_rtx = get_memory_rtx (arg2, len);
4047 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4049 /* Set MEM_SIZE as appropriate. */
4050 if (CONST_INT_P (arg3_rtx))
4052 set_mem_size (arg1_rtx, arg3_rtx);
4053 set_mem_size (arg2_rtx, arg3_rtx);
4056 #ifdef HAVE_cmpmemsi
4058 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4059 GEN_INT (MIN (arg1_align, arg2_align)));
4062 #ifdef HAVE_cmpstrnsi
4064 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4065 GEN_INT (MIN (arg1_align, arg2_align)));
4073 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4074 TYPE_MODE (integer_type_node), 3,
4075 XEXP (arg1_rtx, 0), Pmode,
4076 XEXP (arg2_rtx, 0), Pmode,
4077 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4078 TYPE_UNSIGNED (sizetype)),
4079 TYPE_MODE (sizetype));
4081 /* Return the value in the proper mode for this function. */
4082 mode = TYPE_MODE (TREE_TYPE (exp));
4083 if (GET_MODE (result) == mode)
4085 else if (target != 0)
4087 convert_move (target, result, 0);
4091 return convert_to_mode (mode, result, 0);
4098 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4099 if we failed the caller should emit a normal call, otherwise try to get
4100 the result in TARGET, if convenient. */
4103 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4105 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4108 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4109 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4110 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4112 rtx arg1_rtx, arg2_rtx;
4113 rtx result, insn = NULL_RTX;
4115 tree arg1 = CALL_EXPR_ARG (exp, 0);
4116 tree arg2 = CALL_EXPR_ARG (exp, 1);
4119 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4121 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4123 /* If we don't have POINTER_TYPE, call the function. */
4124 if (arg1_align == 0 || arg2_align == 0)
4127 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4128 arg1 = builtin_save_expr (arg1);
4129 arg2 = builtin_save_expr (arg2);
4131 arg1_rtx = get_memory_rtx (arg1, NULL);
4132 arg2_rtx = get_memory_rtx (arg2, NULL);
4134 #ifdef HAVE_cmpstrsi
4135 /* Try to call cmpstrsi. */
4138 enum machine_mode insn_mode
4139 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4141 /* Make a place to write the result of the instruction. */
4144 && REG_P (result) && GET_MODE (result) == insn_mode
4145 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4146 result = gen_reg_rtx (insn_mode);
4148 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4149 GEN_INT (MIN (arg1_align, arg2_align)));
4152 #ifdef HAVE_cmpstrnsi
4153 /* Try to determine at least one length and call cmpstrnsi. */
4154 if (!insn && HAVE_cmpstrnsi)
4159 enum machine_mode insn_mode
4160 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4161 tree len1 = c_strlen (arg1, 1);
4162 tree len2 = c_strlen (arg2, 1);
4165 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4167 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4169 /* If we don't have a constant length for the first, use the length
4170 of the second, if we know it. We don't require a constant for
4171 this case; some cost analysis could be done if both are available
4172 but neither is constant. For now, assume they're equally cheap,
4173 unless one has side effects. If both strings have constant lengths,
4180 else if (TREE_SIDE_EFFECTS (len1))
4182 else if (TREE_SIDE_EFFECTS (len2))
4184 else if (TREE_CODE (len1) != INTEGER_CST)
4186 else if (TREE_CODE (len2) != INTEGER_CST)
4188 else if (tree_int_cst_lt (len1, len2))
4193 /* If both arguments have side effects, we cannot optimize. */
4194 if (!len || TREE_SIDE_EFFECTS (len))
4197 arg3_rtx = expand_normal (len);
4199 /* Make a place to write the result of the instruction. */
4202 && REG_P (result) && GET_MODE (result) == insn_mode
4203 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4204 result = gen_reg_rtx (insn_mode);
4206 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4207 GEN_INT (MIN (arg1_align, arg2_align)));
4213 enum machine_mode mode;
4216 /* Return the value in the proper mode for this function. */
4217 mode = TYPE_MODE (TREE_TYPE (exp));
4218 if (GET_MODE (result) == mode)
4221 return convert_to_mode (mode, result, 0);
4222 convert_move (target, result, 0);
4226 /* Expand the library call ourselves using a stabilized argument
4227 list to avoid re-evaluating the function's arguments twice. */
4228 #ifdef HAVE_cmpstrnsi
4231 fndecl = get_callee_fndecl (exp);
4232 fn = build_call_nofold (fndecl, 2, arg1, arg2);
4233 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4234 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4235 return expand_call (fn, target, target == const0_rtx);
4241 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4242 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4243 the result in TARGET, if convenient. */
4246 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4247 ATTRIBUTE_UNUSED enum machine_mode mode)
4249 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4251 if (!validate_arglist (exp,
4252 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4255 /* If c_strlen can determine an expression for one of the string
4256 lengths, and it doesn't have side effects, then emit cmpstrnsi
4257 using length MIN(strlen(string)+1, arg3). */
4258 #ifdef HAVE_cmpstrnsi
4261 tree len, len1, len2;
4262 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4265 tree arg1 = CALL_EXPR_ARG (exp, 0);
4266 tree arg2 = CALL_EXPR_ARG (exp, 1);
4267 tree arg3 = CALL_EXPR_ARG (exp, 2);
4270 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4272 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4273 enum machine_mode insn_mode
4274 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4276 len1 = c_strlen (arg1, 1);
4277 len2 = c_strlen (arg2, 1);
4280 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4282 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4284 /* If we don't have a constant length for the first, use the length
4285 of the second, if we know it. We don't require a constant for
4286 this case; some cost analysis could be done if both are available
4287 but neither is constant. For now, assume they're equally cheap,
4288 unless one has side effects. If both strings have constant lengths,
4295 else if (TREE_SIDE_EFFECTS (len1))
4297 else if (TREE_SIDE_EFFECTS (len2))
4299 else if (TREE_CODE (len1) != INTEGER_CST)
4301 else if (TREE_CODE (len2) != INTEGER_CST)
4303 else if (tree_int_cst_lt (len1, len2))
4308 /* If both arguments have side effects, we cannot optimize. */
4309 if (!len || TREE_SIDE_EFFECTS (len))
4312 /* The actual new length parameter is MIN(len,arg3). */
4313 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4314 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4316 /* If we don't have POINTER_TYPE, call the function. */
4317 if (arg1_align == 0 || arg2_align == 0)
4320 /* Make a place to write the result of the instruction. */
4323 && REG_P (result) && GET_MODE (result) == insn_mode
4324 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4325 result = gen_reg_rtx (insn_mode);
4327 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4328 arg1 = builtin_save_expr (arg1);
4329 arg2 = builtin_save_expr (arg2);
4330 len = builtin_save_expr (len);
4332 arg1_rtx = get_memory_rtx (arg1, len);
4333 arg2_rtx = get_memory_rtx (arg2, len);
4334 arg3_rtx = expand_normal (len);
4335 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4336 GEN_INT (MIN (arg1_align, arg2_align)));
4341 /* Return the value in the proper mode for this function. */
4342 mode = TYPE_MODE (TREE_TYPE (exp));
4343 if (GET_MODE (result) == mode)
4346 return convert_to_mode (mode, result, 0);
4347 convert_move (target, result, 0);
4351 /* Expand the library call ourselves using a stabilized argument
4352 list to avoid re-evaluating the function's arguments twice. */
4353 fndecl = get_callee_fndecl (exp);
4354 fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
4355 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4356 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4357 return expand_call (fn, target, target == const0_rtx);
4363 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4364 if that's convenient. */
4367 expand_builtin_saveregs (void)
4371 /* Don't do __builtin_saveregs more than once in a function.
4372 Save the result of the first call and reuse it. */
4373 if (saveregs_value != 0)
4374 return saveregs_value;
4376 /* When this function is called, it means that registers must be
4377 saved on entry to this function. So we migrate the call to the
4378 first insn of this function. */
4382 /* Do whatever the machine needs done in this case. */
4383 val = targetm.calls.expand_builtin_saveregs ();
4388 saveregs_value = val;
4390 /* Put the insns after the NOTE that starts the function. If this
4391 is inside a start_sequence, make the outer-level insn chain current, so
4392 the code is placed at the start of the function. */
4393 push_topmost_sequence ();
4394 emit_insn_after (seq, entry_of_function ());
4395 pop_topmost_sequence ();
4400 /* __builtin_args_info (N) returns word N of the arg space info
4401 for the current function. The number and meanings of words
4402 is controlled by the definition of CUMULATIVE_ARGS. */
4405 expand_builtin_args_info (tree exp)
4407 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4408 int *word_ptr = (int *) &crtl->args.info;
4410 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4412 if (call_expr_nargs (exp) != 0)
4414 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4415 error ("argument of %<__builtin_args_info%> must be constant");
4418 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4420 if (wordnum < 0 || wordnum >= nwords)
4421 error ("argument of %<__builtin_args_info%> out of range");
4423 return GEN_INT (word_ptr[wordnum]);
4427 error ("missing argument in %<__builtin_args_info%>");
4432 /* Expand a call to __builtin_next_arg. */
4435 expand_builtin_next_arg (void)
4437 /* Checking arguments is already done in fold_builtin_next_arg
4438 that must be called before this function. */
4439 return expand_binop (ptr_mode, add_optab,
4440 crtl->args.internal_arg_pointer,
4441 crtl->args.arg_offset_rtx,
4442 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4445 /* Make it easier for the backends by protecting the valist argument
4446 from multiple evaluations. */
4449 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4451 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4453 gcc_assert (vatype != NULL_TREE);
4455 if (TREE_CODE (vatype) == ARRAY_TYPE)
4457 if (TREE_SIDE_EFFECTS (valist))
4458 valist = save_expr (valist);
4460 /* For this case, the backends will be expecting a pointer to
4461 vatype, but it's possible we've actually been given an array
4462 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4464 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4466 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4467 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4476 if (! TREE_SIDE_EFFECTS (valist))
4479 pt = build_pointer_type (vatype);
4480 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4481 TREE_SIDE_EFFECTS (valist) = 1;
4484 if (TREE_SIDE_EFFECTS (valist))
4485 valist = save_expr (valist);
4486 valist = build_fold_indirect_ref_loc (loc, valist);
4492 /* The "standard" definition of va_list is void*. */
4495 std_build_builtin_va_list (void)
4497 return ptr_type_node;
4500 /* The "standard" abi va_list is va_list_type_node. */
4503 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4505 return va_list_type_node;
4508 /* The "standard" type of va_list is va_list_type_node. */
4511 std_canonical_va_list_type (tree type)
4515 if (INDIRECT_REF_P (type))
4516 type = TREE_TYPE (type);
4517 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4518 type = TREE_TYPE (type);
4519 wtype = va_list_type_node;
4521 /* Treat structure va_list types. */
4522 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4523 htype = TREE_TYPE (htype);
4524 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4526 /* If va_list is an array type, the argument may have decayed
4527 to a pointer type, e.g. by being passed to another function.
4528 In that case, unwrap both types so that we can compare the
4529 underlying records. */
4530 if (TREE_CODE (htype) == ARRAY_TYPE
4531 || POINTER_TYPE_P (htype))
4533 wtype = TREE_TYPE (wtype);
4534 htype = TREE_TYPE (htype);
4537 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4538 return va_list_type_node;
4543 /* The "standard" implementation of va_start: just assign `nextarg' to
4547 std_expand_builtin_va_start (tree valist, rtx nextarg)
4549 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4550 convert_move (va_r, nextarg, 0);
4553 /* Expand EXP, a call to __builtin_va_start. */
4556 expand_builtin_va_start (tree exp)
4560 location_t loc = EXPR_LOCATION (exp);
4562 if (call_expr_nargs (exp) < 2)
4564 error_at (loc, "too few arguments to function %<va_start%>");
4568 if (fold_builtin_next_arg (exp, true))
4571 nextarg = expand_builtin_next_arg ();
4572 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4574 if (targetm.expand_builtin_va_start)
4575 targetm.expand_builtin_va_start (valist, nextarg);
4577 std_expand_builtin_va_start (valist, nextarg);
4582 /* The "standard" implementation of va_arg: read the value from the
4583 current (padded) address and increment by the (padded) size. */
4586 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4589 tree addr, t, type_size, rounded_size, valist_tmp;
4590 unsigned HOST_WIDE_INT align, boundary;
4593 #ifdef ARGS_GROW_DOWNWARD
4594 /* All of the alignment and movement below is for args-grow-up machines.
4595 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4596 implement their own specialized gimplify_va_arg_expr routines. */
4600 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4602 type = build_pointer_type (type);
4604 align = PARM_BOUNDARY / BITS_PER_UNIT;
4605 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4607 /* When we align parameter on stack for caller, if the parameter
4608 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4609 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4610 here with caller. */
4611 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4612 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4614 boundary /= BITS_PER_UNIT;
4616 /* Hoist the valist value into a temporary for the moment. */
4617 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4619 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4620 requires greater alignment, we must perform dynamic alignment. */
4621 if (boundary > align
4622 && !integer_zerop (TYPE_SIZE (type)))
4624 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4625 fold_build2 (POINTER_PLUS_EXPR,
4627 valist_tmp, size_int (boundary - 1)));
4628 gimplify_and_add (t, pre_p);
4630 t = fold_convert (sizetype, valist_tmp);
4631 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4632 fold_convert (TREE_TYPE (valist),
4633 fold_build2 (BIT_AND_EXPR, sizetype, t,
4634 size_int (-boundary))));
4635 gimplify_and_add (t, pre_p);
4640 /* If the actual alignment is less than the alignment of the type,
4641 adjust the type accordingly so that we don't assume strict alignment
4642 when dereferencing the pointer. */
4643 boundary *= BITS_PER_UNIT;
4644 if (boundary < TYPE_ALIGN (type))
4646 type = build_variant_type_copy (type);
4647 TYPE_ALIGN (type) = boundary;
4650 /* Compute the rounded size of the type. */
4651 type_size = size_in_bytes (type);
4652 rounded_size = round_up (type_size, align);
4654 /* Reduce rounded_size so it's sharable with the postqueue. */
4655 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4659 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4661 /* Small args are padded downward. */
4662 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4663 rounded_size, size_int (align));
4664 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4665 size_binop (MINUS_EXPR, rounded_size, type_size));
4666 addr = fold_build2 (POINTER_PLUS_EXPR,
4667 TREE_TYPE (addr), addr, t);
4670 /* Compute new value for AP. */
4671 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4672 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4673 gimplify_and_add (t, pre_p);
4675 addr = fold_convert (build_pointer_type (type), addr);
4678 addr = build_va_arg_indirect_ref (addr);
4680 return build_va_arg_indirect_ref (addr);
4683 /* Build an indirect-ref expression over the given TREE, which represents a
4684 piece of a va_arg() expansion. */
4686 build_va_arg_indirect_ref (tree addr)
4688 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4690 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4696 /* Return a dummy expression of type TYPE in order to keep going after an
4700 dummy_object (tree type)
4702 tree t = build_int_cst (build_pointer_type (type), 0);
4703 return build1 (INDIRECT_REF, type, t);
4706 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4707 builtin function, but a very special sort of operator. */
4709 enum gimplify_status
4710 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4712 tree promoted_type, have_va_type;
4713 tree valist = TREE_OPERAND (*expr_p, 0);
4714 tree type = TREE_TYPE (*expr_p);
4716 location_t loc = EXPR_LOCATION (*expr_p);
4718 /* Verify that valist is of the proper type. */
4719 have_va_type = TREE_TYPE (valist);
4720 if (have_va_type == error_mark_node)
4722 have_va_type = targetm.canonical_va_list_type (have_va_type);
4724 if (have_va_type == NULL_TREE)
4726 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4730 /* Generate a diagnostic for requesting data of a type that cannot
4731 be passed through `...' due to type promotion at the call site. */
4732 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4735 static bool gave_help;
4738 /* Unfortunately, this is merely undefined, rather than a constraint
4739 violation, so we cannot make this an error. If this call is never
4740 executed, the program is still strictly conforming. */
4741 warned = warning_at (loc, 0,
4742 "%qT is promoted to %qT when passed through %<...%>",
4743 type, promoted_type);
4744 if (!gave_help && warned)
4747 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4748 promoted_type, type);
4751 /* We can, however, treat "undefined" any way we please.
4752 Call abort to encourage the user to fix the program. */
4754 inform (loc, "if this code is reached, the program will abort");
4755 /* Before the abort, allow the evaluation of the va_list
4756 expression to exit or longjmp. */
4757 gimplify_and_add (valist, pre_p);
4758 t = build_call_expr_loc (loc,
4759 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4760 gimplify_and_add (t, pre_p);
4762 /* This is dead code, but go ahead and finish so that the
4763 mode of the result comes out right. */
4764 *expr_p = dummy_object (type);
4769 /* Make it easier for the backends by protecting the valist argument
4770 from multiple evaluations. */
4771 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4773 /* For this case, the backends will be expecting a pointer to
4774 TREE_TYPE (abi), but it's possible we've
4775 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4777 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4779 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4780 valist = fold_convert_loc (loc, p1,
4781 build_fold_addr_expr_loc (loc, valist));
4784 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4787 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4789 if (!targetm.gimplify_va_arg_expr)
4790 /* FIXME: Once most targets are converted we should merely
4791 assert this is non-null. */
4794 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4799 /* Expand EXP, a call to __builtin_va_end. */
4802 expand_builtin_va_end (tree exp)
4804 tree valist = CALL_EXPR_ARG (exp, 0);
4806 /* Evaluate for side effects, if needed. I hate macros that don't
4808 if (TREE_SIDE_EFFECTS (valist))
4809 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4814 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4815 builtin rather than just as an assignment in stdarg.h because of the
4816 nastiness of array-type va_list types. */
4819 expand_builtin_va_copy (tree exp)
4822 location_t loc = EXPR_LOCATION (exp);
4824 dst = CALL_EXPR_ARG (exp, 0);
4825 src = CALL_EXPR_ARG (exp, 1);
4827 dst = stabilize_va_list_loc (loc, dst, 1);
4828 src = stabilize_va_list_loc (loc, src, 0);
4830 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4832 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4834 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4835 TREE_SIDE_EFFECTS (t) = 1;
4836 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4840 rtx dstb, srcb, size;
4842 /* Evaluate to pointers. */
4843 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4844 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4845 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4846 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4848 dstb = convert_memory_address (Pmode, dstb);
4849 srcb = convert_memory_address (Pmode, srcb);
4851 /* "Dereference" to BLKmode memories. */
4852 dstb = gen_rtx_MEM (BLKmode, dstb);
4853 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4854 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4855 srcb = gen_rtx_MEM (BLKmode, srcb);
4856 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4857 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4860 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4866 /* Expand a call to one of the builtin functions __builtin_frame_address or
4867 __builtin_return_address. */
4870 expand_builtin_frame_address (tree fndecl, tree exp)
4872 /* The argument must be a nonnegative integer constant.
4873 It counts the number of frames to scan up the stack.
4874 The value is the return address saved in that frame. */
4875 if (call_expr_nargs (exp) == 0)
4876 /* Warning about missing arg was already issued. */
4878 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4880 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4881 error ("invalid argument to %<__builtin_frame_address%>");
4883 error ("invalid argument to %<__builtin_return_address%>");
4889 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4890 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4892 /* Some ports cannot access arbitrary stack frames. */
4895 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4896 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4898 warning (0, "unsupported argument to %<__builtin_return_address%>");
4902 /* For __builtin_frame_address, return what we've got. */
4903 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4907 && ! CONSTANT_P (tem))
4908 tem = copy_to_mode_reg (Pmode, tem);
4913 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4914 we failed and the caller should emit a normal call, otherwise try to get
4915 the result in TARGET, if convenient. */
4918 expand_builtin_alloca (tree exp, rtx target)
4923 /* Emit normal call if marked not-inlineable. */
4924 if (CALL_CANNOT_INLINE_P (exp))
4927 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4930 /* Compute the argument. */
4931 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4933 /* Allocate the desired space. */
4934 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4935 result = convert_memory_address (ptr_mode, result);
4940 /* Expand a call to a bswap builtin with argument ARG0. MODE
4941 is the mode to expand with. */
4944 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4946 enum machine_mode mode;
4950 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4953 arg = CALL_EXPR_ARG (exp, 0);
4954 mode = TYPE_MODE (TREE_TYPE (arg));
4955 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4957 target = expand_unop (mode, bswap_optab, op0, target, 1);
4959 gcc_assert (target);
4961 return convert_to_mode (mode, target, 0);
4964 /* Expand a call to a unary builtin in EXP.
4965 Return NULL_RTX if a normal call should be emitted rather than expanding the
4966 function in-line. If convenient, the result should be placed in TARGET.
4967 SUBTARGET may be used as the target for computing one of EXP's operands. */
4970 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4971 rtx subtarget, optab op_optab)
4975 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4978 /* Compute the argument. */
4979 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4980 VOIDmode, EXPAND_NORMAL);
4981 /* Compute op, into TARGET if possible.
4982 Set TARGET to wherever the result comes back. */
4983 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4984 op_optab, op0, target, 1);
4985 gcc_assert (target);
4987 return convert_to_mode (target_mode, target, 0);
4990 /* Expand a call to __builtin_expect. We just return our argument
4991 as the builtin_expect semantic should've been already executed by
4992 tree branch prediction pass. */
4995 expand_builtin_expect (tree exp, rtx target)
4999 if (call_expr_nargs (exp) < 2)
5001 arg = CALL_EXPR_ARG (exp, 0);
5003 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5004 /* When guessing was done, the hints should be already stripped away. */
5005 gcc_assert (!flag_guess_branch_prob
5006 || optimize == 0 || errorcount || sorrycount);
5011 expand_builtin_trap (void)
5015 emit_insn (gen_trap ());
5018 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5022 /* Expand a call to __builtin_unreachable. We do nothing except emit
5023 a barrier saying that control flow will not pass here.
5025 It is the responsibility of the program being compiled to ensure
5026 that control flow does never reach __builtin_unreachable. */
5028 expand_builtin_unreachable (void)
5033 /* Expand EXP, a call to fabs, fabsf or fabsl.
5034 Return NULL_RTX if a normal call should be emitted rather than expanding
5035 the function inline. If convenient, the result should be placed
5036 in TARGET. SUBTARGET may be used as the target for computing
5040 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5042 enum machine_mode mode;
5046 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5049 arg = CALL_EXPR_ARG (exp, 0);
5050 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5051 mode = TYPE_MODE (TREE_TYPE (arg));
5052 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5053 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5056 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5057 Return NULL is a normal call should be emitted rather than expanding the
5058 function inline. If convenient, the result should be placed in TARGET.
5059 SUBTARGET may be used as the target for computing the operand. */
5062 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5067 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5070 arg = CALL_EXPR_ARG (exp, 0);
5071 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5073 arg = CALL_EXPR_ARG (exp, 1);
5074 op1 = expand_normal (arg);
5076 return expand_copysign (op0, op1, target);
5079 /* Create a new constant string literal and return a char* pointer to it.
5080 The STRING_CST value is the LEN characters at STR. */
5082 build_string_literal (int len, const char *str)
5084 tree t, elem, index, type;
5086 t = build_string (len, str);
5087 elem = build_type_variant (char_type_node, 1, 0);
5088 index = build_index_type (size_int (len - 1));
5089 type = build_array_type (elem, index);
5090 TREE_TYPE (t) = type;
5091 TREE_CONSTANT (t) = 1;
5092 TREE_READONLY (t) = 1;
5093 TREE_STATIC (t) = 1;
5095 type = build_pointer_type (elem);
5096 t = build1 (ADDR_EXPR, type,
5097 build4 (ARRAY_REF, elem,
5098 t, integer_zero_node, NULL_TREE, NULL_TREE));
5102 /* Expand a call to either the entry or exit function profiler. */
5105 expand_builtin_profile_func (bool exitp)
5107 rtx this_rtx, which;
5109 this_rtx = DECL_RTL (current_function_decl);
5110 gcc_assert (MEM_P (this_rtx));
5111 this_rtx = XEXP (this_rtx, 0);
5114 which = profile_function_exit_libfunc;
5116 which = profile_function_entry_libfunc;
5118 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5119 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5126 /* Expand a call to __builtin___clear_cache. */
5129 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5131 #ifndef HAVE_clear_cache
5132 #ifdef CLEAR_INSN_CACHE
5133 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5134 does something. Just do the default expansion to a call to
5138 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5139 does nothing. There is no need to call it. Do nothing. */
5141 #endif /* CLEAR_INSN_CACHE */
5143 /* We have a "clear_cache" insn, and it will handle everything. */
5145 rtx begin_rtx, end_rtx;
5146 enum insn_code icode;
5148 /* We must not expand to a library call. If we did, any
5149 fallback library function in libgcc that might contain a call to
5150 __builtin___clear_cache() would recurse infinitely. */
5151 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5153 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5157 if (HAVE_clear_cache)
5159 icode = CODE_FOR_clear_cache;
5161 begin = CALL_EXPR_ARG (exp, 0);
5162 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5163 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5164 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5165 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5167 end = CALL_EXPR_ARG (exp, 1);
5168 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5169 end_rtx = convert_memory_address (Pmode, end_rtx);
5170 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5171 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5173 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5176 #endif /* HAVE_clear_cache */
5179 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5182 round_trampoline_addr (rtx tramp)
5184 rtx temp, addend, mask;
5186 /* If we don't need too much alignment, we'll have been guaranteed
5187 proper alignment by get_trampoline_type. */
5188 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5191 /* Round address up to desired boundary. */
5192 temp = gen_reg_rtx (Pmode);
5193 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5194 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5196 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5197 temp, 0, OPTAB_LIB_WIDEN);
5198 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5199 temp, 0, OPTAB_LIB_WIDEN);
5205 expand_builtin_init_trampoline (tree exp)
5207 tree t_tramp, t_func, t_chain;
5208 rtx m_tramp, r_tramp, r_chain, tmp;
5210 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5211 POINTER_TYPE, VOID_TYPE))
5214 t_tramp = CALL_EXPR_ARG (exp, 0);
5215 t_func = CALL_EXPR_ARG (exp, 1);
5216 t_chain = CALL_EXPR_ARG (exp, 2);
5218 r_tramp = expand_normal (t_tramp);
5219 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5220 MEM_NOTRAP_P (m_tramp) = 1;
5222 /* The TRAMP argument should be the address of a field within the
5223 local function's FRAME decl. Let's see if we can fill in the
5224 to fill in the MEM_ATTRs for this memory. */
5225 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5226 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
5229 tmp = round_trampoline_addr (r_tramp);
5232 m_tramp = change_address (m_tramp, BLKmode, tmp);
5233 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5234 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5237 /* The FUNC argument should be the address of the nested function.
5238 Extract the actual function decl to pass to the hook. */
5239 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5240 t_func = TREE_OPERAND (t_func, 0);
5241 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5243 r_chain = expand_normal (t_chain);
5245 /* Generate insns to initialize the trampoline. */
5246 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5248 trampolines_created = 1;
5253 expand_builtin_adjust_trampoline (tree exp)
5257 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5260 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5261 tramp = round_trampoline_addr (tramp);
5262 if (targetm.calls.trampoline_adjust_address)
5263 tramp = targetm.calls.trampoline_adjust_address (tramp);
5268 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5269 function. The function first checks whether the back end provides
5270 an insn to implement signbit for the respective mode. If not, it
5271 checks whether the floating point format of the value is such that
5272 the sign bit can be extracted. If that is not the case, the
5273 function returns NULL_RTX to indicate that a normal call should be
5274 emitted rather than expanding the function in-line. EXP is the
5275 expression that is a call to the builtin function; if convenient,
5276 the result should be placed in TARGET. */
5278 expand_builtin_signbit (tree exp, rtx target)
5280 const struct real_format *fmt;
5281 enum machine_mode fmode, imode, rmode;
5284 enum insn_code icode;
5286 location_t loc = EXPR_LOCATION (exp);
5288 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5291 arg = CALL_EXPR_ARG (exp, 0);
5292 fmode = TYPE_MODE (TREE_TYPE (arg));
5293 rmode = TYPE_MODE (TREE_TYPE (exp));
5294 fmt = REAL_MODE_FORMAT (fmode);
5296 arg = builtin_save_expr (arg);
5298 /* Expand the argument yielding a RTX expression. */
5299 temp = expand_normal (arg);
5301 /* Check if the back end provides an insn that handles signbit for the
5303 icode = signbit_optab->handlers [(int) fmode].insn_code;
5304 if (icode != CODE_FOR_nothing)
5306 rtx last = get_last_insn ();
5307 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5308 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5310 delete_insns_since (last);
5313 /* For floating point formats without a sign bit, implement signbit
5315 bitpos = fmt->signbit_ro;
5318 /* But we can't do this if the format supports signed zero. */
5319 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5322 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5323 build_real (TREE_TYPE (arg), dconst0));
5324 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5327 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5329 imode = int_mode_for_mode (fmode);
5330 if (imode == BLKmode)
5332 temp = gen_lowpart (imode, temp);
5337 /* Handle targets with different FP word orders. */
5338 if (FLOAT_WORDS_BIG_ENDIAN)
5339 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5341 word = bitpos / BITS_PER_WORD;
5342 temp = operand_subword_force (temp, word, fmode);
5343 bitpos = bitpos % BITS_PER_WORD;
5346 /* Force the intermediate word_mode (or narrower) result into a
5347 register. This avoids attempting to create paradoxical SUBREGs
5348 of floating point modes below. */
5349 temp = force_reg (imode, temp);
5351 /* If the bitpos is within the "result mode" lowpart, the operation
5352 can be implement with a single bitwise AND. Otherwise, we need
5353 a right shift and an AND. */
5355 if (bitpos < GET_MODE_BITSIZE (rmode))
5357 double_int mask = double_int_setbit (double_int_zero, bitpos);
5359 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5360 temp = gen_lowpart (rmode, temp);
5361 temp = expand_binop (rmode, and_optab, temp,
5362 immed_double_int_const (mask, rmode),
5363 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5367 /* Perform a logical right shift to place the signbit in the least
5368 significant bit, then truncate the result to the desired mode
5369 and mask just this bit. */
5370 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5371 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5372 temp = gen_lowpart (rmode, temp);
5373 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5374 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5380 /* Expand fork or exec calls. TARGET is the desired target of the
5381 call. EXP is the call. FN is the
5382 identificator of the actual function. IGNORE is nonzero if the
5383 value is to be ignored. */
5386 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5391 /* If we are not profiling, just call the function. */
5392 if (!profile_arc_flag)
5395 /* Otherwise call the wrapper. This should be equivalent for the rest of
5396 compiler, so the code does not diverge, and the wrapper may run the
5397 code necessary for keeping the profiling sane. */
5399 switch (DECL_FUNCTION_CODE (fn))
5402 id = get_identifier ("__gcov_fork");
5405 case BUILT_IN_EXECL:
5406 id = get_identifier ("__gcov_execl");
5409 case BUILT_IN_EXECV:
5410 id = get_identifier ("__gcov_execv");
5413 case BUILT_IN_EXECLP:
5414 id = get_identifier ("__gcov_execlp");
5417 case BUILT_IN_EXECLE:
5418 id = get_identifier ("__gcov_execle");
5421 case BUILT_IN_EXECVP:
5422 id = get_identifier ("__gcov_execvp");
5425 case BUILT_IN_EXECVE:
5426 id = get_identifier ("__gcov_execve");
5433 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5434 FUNCTION_DECL, id, TREE_TYPE (fn));
5435 DECL_EXTERNAL (decl) = 1;
5436 TREE_PUBLIC (decl) = 1;
5437 DECL_ARTIFICIAL (decl) = 1;
5438 TREE_NOTHROW (decl) = 1;
5439 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5440 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5441 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5442 return expand_call (call, target, ignore);
5447 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5448 the pointer in these functions is void*, the tree optimizers may remove
5449 casts. The mode computed in expand_builtin isn't reliable either, due
5450 to __sync_bool_compare_and_swap.
5452 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5453 group of builtins. This gives us log2 of the mode size. */
5455 static inline enum machine_mode
5456 get_builtin_sync_mode (int fcode_diff)
5458 /* The size is not negotiable, so ask not to get BLKmode in return
5459 if the target indicates that a smaller size would be better. */
5460 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5463 /* Expand the memory expression LOC and return the appropriate memory operand
5464 for the builtin_sync operations. */
5467 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5471 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5472 addr = convert_memory_address (Pmode, addr);
5474 /* Note that we explicitly do not want any alias information for this
5475 memory, so that we kill all other live memories. Otherwise we don't
5476 satisfy the full barrier semantics of the intrinsic. */
5477 mem = validize_mem (gen_rtx_MEM (mode, addr));
5479 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5480 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5481 MEM_VOLATILE_P (mem) = 1;
5486 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5487 EXP is the CALL_EXPR. CODE is the rtx code
5488 that corresponds to the arithmetic or logical operation from the name;
5489 an exception here is that NOT actually means NAND. TARGET is an optional
5490 place for us to store the results; AFTER is true if this is the
5491 fetch_and_xxx form. IGNORE is true if we don't actually care about
5492 the result of the operation at all. */
5495 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5496 enum rtx_code code, bool after,
5497 rtx target, bool ignore)
5500 enum machine_mode old_mode;
5501 location_t loc = EXPR_LOCATION (exp);
5503 if (code == NOT && warn_sync_nand)
5505 tree fndecl = get_callee_fndecl (exp);
5506 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5508 static bool warned_f_a_n, warned_n_a_f;
5512 case BUILT_IN_FETCH_AND_NAND_1:
5513 case BUILT_IN_FETCH_AND_NAND_2:
5514 case BUILT_IN_FETCH_AND_NAND_4:
5515 case BUILT_IN_FETCH_AND_NAND_8:
5516 case BUILT_IN_FETCH_AND_NAND_16:
5521 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5522 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5523 warned_f_a_n = true;
5526 case BUILT_IN_NAND_AND_FETCH_1:
5527 case BUILT_IN_NAND_AND_FETCH_2:
5528 case BUILT_IN_NAND_AND_FETCH_4:
5529 case BUILT_IN_NAND_AND_FETCH_8:
5530 case BUILT_IN_NAND_AND_FETCH_16:
5535 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5536 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5537 warned_n_a_f = true;
5545 /* Expand the operands. */
5546 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5548 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5549 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5550 of CONST_INTs, where we know the old_mode only from the call argument. */
5551 old_mode = GET_MODE (val);
5552 if (old_mode == VOIDmode)
5553 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5554 val = convert_modes (mode, old_mode, val, 1);
5557 return expand_sync_operation (mem, val, code);
5559 return expand_sync_fetch_operation (mem, val, code, after, target);
5562 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5563 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5564 true if this is the boolean form. TARGET is a place for us to store the
5565 results; this is NOT optional if IS_BOOL is true. */
5568 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5569 bool is_bool, rtx target)
5571 rtx old_val, new_val, mem;
5572 enum machine_mode old_mode;
5574 /* Expand the operands. */
5575 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5578 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5579 mode, EXPAND_NORMAL);
5580 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5581 of CONST_INTs, where we know the old_mode only from the call argument. */
5582 old_mode = GET_MODE (old_val);
5583 if (old_mode == VOIDmode)
5584 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5585 old_val = convert_modes (mode, old_mode, old_val, 1);
5587 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5588 mode, EXPAND_NORMAL);
5589 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5590 of CONST_INTs, where we know the old_mode only from the call argument. */
5591 old_mode = GET_MODE (new_val);
5592 if (old_mode == VOIDmode)
5593 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5594 new_val = convert_modes (mode, old_mode, new_val, 1);
5597 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5599 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5602 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5603 general form is actually an atomic exchange, and some targets only
5604 support a reduced form with the second argument being a constant 1.
5605 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5609 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5613 enum machine_mode old_mode;
5615 /* Expand the operands. */
5616 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5617 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5618 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5619 of CONST_INTs, where we know the old_mode only from the call argument. */
5620 old_mode = GET_MODE (val);
5621 if (old_mode == VOIDmode)
5622 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5623 val = convert_modes (mode, old_mode, val, 1);
5625 return expand_sync_lock_test_and_set (mem, val, target);
5628 /* Expand the __sync_synchronize intrinsic. */
5631 expand_builtin_synchronize (void)
5634 VEC (tree, gc) *v_clobbers;
5636 #ifdef HAVE_memory_barrier
5637 if (HAVE_memory_barrier)
5639 emit_insn (gen_memory_barrier ());
5644 if (synchronize_libfunc != NULL_RTX)
5646 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5650 /* If no explicit memory barrier instruction is available, create an
5651 empty asm stmt with a memory clobber. */
5652 v_clobbers = VEC_alloc (tree, gc, 1);
5653 VEC_quick_push (tree, v_clobbers,
5654 tree_cons (NULL, build_string (6, "memory"), NULL));
5655 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5656 gimple_asm_set_volatile (x, true);
5657 expand_asm_stmt (x);
5660 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5663 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5665 enum insn_code icode;
5667 rtx val = const0_rtx;
5669 /* Expand the operands. */
5670 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5672 /* If there is an explicit operation in the md file, use it. */
5673 icode = sync_lock_release[mode];
5674 if (icode != CODE_FOR_nothing)
5676 if (!insn_data[icode].operand[1].predicate (val, mode))
5677 val = force_reg (mode, val);
5679 insn = GEN_FCN (icode) (mem, val);
5687 /* Otherwise we can implement this operation by emitting a barrier
5688 followed by a store of zero. */
5689 expand_builtin_synchronize ();
5690 emit_move_insn (mem, val);
5693 /* Expand an expression EXP that calls a built-in function,
5694 with result going to TARGET if that's convenient
5695 (and in mode MODE if that's convenient).
5696 SUBTARGET may be used as the target for computing one of EXP's operands.
5697 IGNORE is nonzero if the value is to be ignored. */
5700 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5703 tree fndecl = get_callee_fndecl (exp);
5704 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5705 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5707 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5708 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5710 /* When not optimizing, generate calls to library functions for a certain
5713 && !called_as_built_in (fndecl)
5714 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5715 && fcode != BUILT_IN_ALLOCA
5716 && fcode != BUILT_IN_FREE)
5717 return expand_call (exp, target, ignore);
5719 /* The built-in function expanders test for target == const0_rtx
5720 to determine whether the function's result will be ignored. */
5722 target = const0_rtx;
5724 /* If the result of a pure or const built-in function is ignored, and
5725 none of its arguments are volatile, we can avoid expanding the
5726 built-in call and just evaluate the arguments for side-effects. */
5727 if (target == const0_rtx
5728 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5730 bool volatilep = false;
5732 call_expr_arg_iterator iter;
5734 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5735 if (TREE_THIS_VOLATILE (arg))
5743 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5744 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5751 CASE_FLT_FN (BUILT_IN_FABS):
5752 target = expand_builtin_fabs (exp, target, subtarget);
5757 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5758 target = expand_builtin_copysign (exp, target, subtarget);
5763 /* Just do a normal library call if we were unable to fold
5765 CASE_FLT_FN (BUILT_IN_CABS):
5768 CASE_FLT_FN (BUILT_IN_EXP):
5769 CASE_FLT_FN (BUILT_IN_EXP10):
5770 CASE_FLT_FN (BUILT_IN_POW10):
5771 CASE_FLT_FN (BUILT_IN_EXP2):
5772 CASE_FLT_FN (BUILT_IN_EXPM1):
5773 CASE_FLT_FN (BUILT_IN_LOGB):
5774 CASE_FLT_FN (BUILT_IN_LOG):
5775 CASE_FLT_FN (BUILT_IN_LOG10):
5776 CASE_FLT_FN (BUILT_IN_LOG2):
5777 CASE_FLT_FN (BUILT_IN_LOG1P):
5778 CASE_FLT_FN (BUILT_IN_TAN):
5779 CASE_FLT_FN (BUILT_IN_ASIN):
5780 CASE_FLT_FN (BUILT_IN_ACOS):
5781 CASE_FLT_FN (BUILT_IN_ATAN):
5782 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5783 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5784 because of possible accuracy problems. */
5785 if (! flag_unsafe_math_optimizations)
5787 CASE_FLT_FN (BUILT_IN_SQRT):
5788 CASE_FLT_FN (BUILT_IN_FLOOR):
5789 CASE_FLT_FN (BUILT_IN_CEIL):
5790 CASE_FLT_FN (BUILT_IN_TRUNC):
5791 CASE_FLT_FN (BUILT_IN_ROUND):
5792 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5793 CASE_FLT_FN (BUILT_IN_RINT):
5794 target = expand_builtin_mathfn (exp, target, subtarget);
5799 CASE_FLT_FN (BUILT_IN_ILOGB):
5800 if (! flag_unsafe_math_optimizations)
5802 CASE_FLT_FN (BUILT_IN_ISINF):
5803 CASE_FLT_FN (BUILT_IN_FINITE):
5804 case BUILT_IN_ISFINITE:
5805 case BUILT_IN_ISNORMAL:
5806 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5811 CASE_FLT_FN (BUILT_IN_LCEIL):
5812 CASE_FLT_FN (BUILT_IN_LLCEIL):
5813 CASE_FLT_FN (BUILT_IN_LFLOOR):
5814 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5815 target = expand_builtin_int_roundingfn (exp, target);
5820 CASE_FLT_FN (BUILT_IN_LRINT):
5821 CASE_FLT_FN (BUILT_IN_LLRINT):
5822 CASE_FLT_FN (BUILT_IN_LROUND):
5823 CASE_FLT_FN (BUILT_IN_LLROUND):
5824 target = expand_builtin_int_roundingfn_2 (exp, target);
5829 CASE_FLT_FN (BUILT_IN_POW):
5830 target = expand_builtin_pow (exp, target, subtarget);
5835 CASE_FLT_FN (BUILT_IN_POWI):
5836 target = expand_builtin_powi (exp, target, subtarget);
5841 CASE_FLT_FN (BUILT_IN_ATAN2):
5842 CASE_FLT_FN (BUILT_IN_LDEXP):
5843 CASE_FLT_FN (BUILT_IN_SCALB):
5844 CASE_FLT_FN (BUILT_IN_SCALBN):
5845 CASE_FLT_FN (BUILT_IN_SCALBLN):
5846 if (! flag_unsafe_math_optimizations)
5849 CASE_FLT_FN (BUILT_IN_FMOD):
5850 CASE_FLT_FN (BUILT_IN_REMAINDER):
5851 CASE_FLT_FN (BUILT_IN_DREM):
5852 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5857 CASE_FLT_FN (BUILT_IN_CEXPI):
5858 target = expand_builtin_cexpi (exp, target, subtarget);
5859 gcc_assert (target);
5862 CASE_FLT_FN (BUILT_IN_SIN):
5863 CASE_FLT_FN (BUILT_IN_COS):
5864 if (! flag_unsafe_math_optimizations)
5866 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5871 CASE_FLT_FN (BUILT_IN_SINCOS):
5872 if (! flag_unsafe_math_optimizations)
5874 target = expand_builtin_sincos (exp);
5879 case BUILT_IN_APPLY_ARGS:
5880 return expand_builtin_apply_args ();
5882 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5883 FUNCTION with a copy of the parameters described by
5884 ARGUMENTS, and ARGSIZE. It returns a block of memory
5885 allocated on the stack into which is stored all the registers
5886 that might possibly be used for returning the result of a
5887 function. ARGUMENTS is the value returned by
5888 __builtin_apply_args. ARGSIZE is the number of bytes of
5889 arguments that must be copied. ??? How should this value be
5890 computed? We'll also need a safe worst case value for varargs
5892 case BUILT_IN_APPLY:
5893 if (!validate_arglist (exp, POINTER_TYPE,
5894 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5895 && !validate_arglist (exp, REFERENCE_TYPE,
5896 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5902 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5903 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5904 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5906 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5909 /* __builtin_return (RESULT) causes the function to return the
5910 value described by RESULT. RESULT is address of the block of
5911 memory returned by __builtin_apply. */
5912 case BUILT_IN_RETURN:
5913 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5914 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5917 case BUILT_IN_SAVEREGS:
5918 return expand_builtin_saveregs ();
5920 case BUILT_IN_ARGS_INFO:
5921 return expand_builtin_args_info (exp);
5923 case BUILT_IN_VA_ARG_PACK:
5924 /* All valid uses of __builtin_va_arg_pack () are removed during
5926 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5929 case BUILT_IN_VA_ARG_PACK_LEN:
5930 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5932 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5935 /* Return the address of the first anonymous stack arg. */
5936 case BUILT_IN_NEXT_ARG:
5937 if (fold_builtin_next_arg (exp, false))
5939 return expand_builtin_next_arg ();
5941 case BUILT_IN_CLEAR_CACHE:
5942 target = expand_builtin___clear_cache (exp);
5947 case BUILT_IN_CLASSIFY_TYPE:
5948 return expand_builtin_classify_type (exp);
5950 case BUILT_IN_CONSTANT_P:
5953 case BUILT_IN_FRAME_ADDRESS:
5954 case BUILT_IN_RETURN_ADDRESS:
5955 return expand_builtin_frame_address (fndecl, exp);
5957 /* Returns the address of the area where the structure is returned.
5959 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5960 if (call_expr_nargs (exp) != 0
5961 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5962 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5965 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5967 case BUILT_IN_ALLOCA:
5968 target = expand_builtin_alloca (exp, target);
5973 case BUILT_IN_STACK_SAVE:
5974 return expand_stack_save ();
5976 case BUILT_IN_STACK_RESTORE:
5977 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5980 case BUILT_IN_BSWAP32:
5981 case BUILT_IN_BSWAP64:
5982 target = expand_builtin_bswap (exp, target, subtarget);
5988 CASE_INT_FN (BUILT_IN_FFS):
5989 case BUILT_IN_FFSIMAX:
5990 target = expand_builtin_unop (target_mode, exp, target,
5991 subtarget, ffs_optab);
5996 CASE_INT_FN (BUILT_IN_CLZ):
5997 case BUILT_IN_CLZIMAX:
5998 target = expand_builtin_unop (target_mode, exp, target,
5999 subtarget, clz_optab);
6004 CASE_INT_FN (BUILT_IN_CTZ):
6005 case BUILT_IN_CTZIMAX:
6006 target = expand_builtin_unop (target_mode, exp, target,
6007 subtarget, ctz_optab);
6012 CASE_INT_FN (BUILT_IN_POPCOUNT):
6013 case BUILT_IN_POPCOUNTIMAX:
6014 target = expand_builtin_unop (target_mode, exp, target,
6015 subtarget, popcount_optab);
6020 CASE_INT_FN (BUILT_IN_PARITY):
6021 case BUILT_IN_PARITYIMAX:
6022 target = expand_builtin_unop (target_mode, exp, target,
6023 subtarget, parity_optab);
6028 case BUILT_IN_STRLEN:
6029 target = expand_builtin_strlen (exp, target, target_mode);
6034 case BUILT_IN_STRCPY:
6035 target = expand_builtin_strcpy (exp, target);
6040 case BUILT_IN_STRNCPY:
6041 target = expand_builtin_strncpy (exp, target);
6046 case BUILT_IN_STPCPY:
6047 target = expand_builtin_stpcpy (exp, target, mode);
6052 case BUILT_IN_MEMCPY:
6053 target = expand_builtin_memcpy (exp, target);
6058 case BUILT_IN_MEMPCPY:
6059 target = expand_builtin_mempcpy (exp, target, mode);
6064 case BUILT_IN_MEMSET:
6065 target = expand_builtin_memset (exp, target, mode);
6070 case BUILT_IN_BZERO:
6071 target = expand_builtin_bzero (exp);
6076 case BUILT_IN_STRCMP:
6077 target = expand_builtin_strcmp (exp, target);
6082 case BUILT_IN_STRNCMP:
6083 target = expand_builtin_strncmp (exp, target, mode);
6089 case BUILT_IN_MEMCMP:
6090 target = expand_builtin_memcmp (exp, target, mode);
6095 case BUILT_IN_SETJMP:
6096 /* This should have been lowered to the builtins below. */
6099 case BUILT_IN_SETJMP_SETUP:
6100 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6101 and the receiver label. */
6102 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6104 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6105 VOIDmode, EXPAND_NORMAL);
6106 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6107 rtx label_r = label_rtx (label);
6109 /* This is copied from the handling of non-local gotos. */
6110 expand_builtin_setjmp_setup (buf_addr, label_r);
6111 nonlocal_goto_handler_labels
6112 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6113 nonlocal_goto_handler_labels);
6114 /* ??? Do not let expand_label treat us as such since we would
6115 not want to be both on the list of non-local labels and on
6116 the list of forced labels. */
6117 FORCED_LABEL (label) = 0;
6122 case BUILT_IN_SETJMP_DISPATCHER:
6123 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6124 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6126 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6127 rtx label_r = label_rtx (label);
6129 /* Remove the dispatcher label from the list of non-local labels
6130 since the receiver labels have been added to it above. */
6131 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6136 case BUILT_IN_SETJMP_RECEIVER:
6137 /* __builtin_setjmp_receiver is passed the receiver label. */
6138 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6140 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6141 rtx label_r = label_rtx (label);
6143 expand_builtin_setjmp_receiver (label_r);
6148 /* __builtin_longjmp is passed a pointer to an array of five words.
6149 It's similar to the C library longjmp function but works with
6150 __builtin_setjmp above. */
6151 case BUILT_IN_LONGJMP:
6152 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6154 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6155 VOIDmode, EXPAND_NORMAL);
6156 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6158 if (value != const1_rtx)
6160 error ("%<__builtin_longjmp%> second argument must be 1");
6164 expand_builtin_longjmp (buf_addr, value);
6169 case BUILT_IN_NONLOCAL_GOTO:
6170 target = expand_builtin_nonlocal_goto (exp);
6175 /* This updates the setjmp buffer that is its argument with the value
6176 of the current stack pointer. */
6177 case BUILT_IN_UPDATE_SETJMP_BUF:
6178 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6181 = expand_normal (CALL_EXPR_ARG (exp, 0));
6183 expand_builtin_update_setjmp_buf (buf_addr);
6189 expand_builtin_trap ();
6192 case BUILT_IN_UNREACHABLE:
6193 expand_builtin_unreachable ();
6196 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6197 case BUILT_IN_SIGNBITD32:
6198 case BUILT_IN_SIGNBITD64:
6199 case BUILT_IN_SIGNBITD128:
6200 target = expand_builtin_signbit (exp, target);
6205 /* Various hooks for the DWARF 2 __throw routine. */
6206 case BUILT_IN_UNWIND_INIT:
6207 expand_builtin_unwind_init ();
6209 case BUILT_IN_DWARF_CFA:
6210 return virtual_cfa_rtx;
6211 #ifdef DWARF2_UNWIND_INFO
6212 case BUILT_IN_DWARF_SP_COLUMN:
6213 return expand_builtin_dwarf_sp_column ();
6214 case BUILT_IN_INIT_DWARF_REG_SIZES:
6215 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6218 case BUILT_IN_FROB_RETURN_ADDR:
6219 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6220 case BUILT_IN_EXTRACT_RETURN_ADDR:
6221 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6222 case BUILT_IN_EH_RETURN:
6223 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6224 CALL_EXPR_ARG (exp, 1));
6226 #ifdef EH_RETURN_DATA_REGNO
6227 case BUILT_IN_EH_RETURN_DATA_REGNO:
6228 return expand_builtin_eh_return_data_regno (exp);
6230 case BUILT_IN_EXTEND_POINTER:
6231 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6232 case BUILT_IN_EH_POINTER:
6233 return expand_builtin_eh_pointer (exp);
6234 case BUILT_IN_EH_FILTER:
6235 return expand_builtin_eh_filter (exp);
6236 case BUILT_IN_EH_COPY_VALUES:
6237 return expand_builtin_eh_copy_values (exp);
6239 case BUILT_IN_VA_START:
6240 return expand_builtin_va_start (exp);
6241 case BUILT_IN_VA_END:
6242 return expand_builtin_va_end (exp);
6243 case BUILT_IN_VA_COPY:
6244 return expand_builtin_va_copy (exp);
6245 case BUILT_IN_EXPECT:
6246 return expand_builtin_expect (exp, target);
6247 case BUILT_IN_PREFETCH:
6248 expand_builtin_prefetch (exp);
6251 case BUILT_IN_PROFILE_FUNC_ENTER:
6252 return expand_builtin_profile_func (false);
6253 case BUILT_IN_PROFILE_FUNC_EXIT:
6254 return expand_builtin_profile_func (true);
6256 case BUILT_IN_INIT_TRAMPOLINE:
6257 return expand_builtin_init_trampoline (exp);
6258 case BUILT_IN_ADJUST_TRAMPOLINE:
6259 return expand_builtin_adjust_trampoline (exp);
6262 case BUILT_IN_EXECL:
6263 case BUILT_IN_EXECV:
6264 case BUILT_IN_EXECLP:
6265 case BUILT_IN_EXECLE:
6266 case BUILT_IN_EXECVP:
6267 case BUILT_IN_EXECVE:
6268 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6273 case BUILT_IN_FETCH_AND_ADD_1:
6274 case BUILT_IN_FETCH_AND_ADD_2:
6275 case BUILT_IN_FETCH_AND_ADD_4:
6276 case BUILT_IN_FETCH_AND_ADD_8:
6277 case BUILT_IN_FETCH_AND_ADD_16:
6278 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6279 target = expand_builtin_sync_operation (mode, exp, PLUS,
6280 false, target, ignore);
6285 case BUILT_IN_FETCH_AND_SUB_1:
6286 case BUILT_IN_FETCH_AND_SUB_2:
6287 case BUILT_IN_FETCH_AND_SUB_4:
6288 case BUILT_IN_FETCH_AND_SUB_8:
6289 case BUILT_IN_FETCH_AND_SUB_16:
6290 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6291 target = expand_builtin_sync_operation (mode, exp, MINUS,
6292 false, target, ignore);
6297 case BUILT_IN_FETCH_AND_OR_1:
6298 case BUILT_IN_FETCH_AND_OR_2:
6299 case BUILT_IN_FETCH_AND_OR_4:
6300 case BUILT_IN_FETCH_AND_OR_8:
6301 case BUILT_IN_FETCH_AND_OR_16:
6302 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6303 target = expand_builtin_sync_operation (mode, exp, IOR,
6304 false, target, ignore);
6309 case BUILT_IN_FETCH_AND_AND_1:
6310 case BUILT_IN_FETCH_AND_AND_2:
6311 case BUILT_IN_FETCH_AND_AND_4:
6312 case BUILT_IN_FETCH_AND_AND_8:
6313 case BUILT_IN_FETCH_AND_AND_16:
6314 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6315 target = expand_builtin_sync_operation (mode, exp, AND,
6316 false, target, ignore);
6321 case BUILT_IN_FETCH_AND_XOR_1:
6322 case BUILT_IN_FETCH_AND_XOR_2:
6323 case BUILT_IN_FETCH_AND_XOR_4:
6324 case BUILT_IN_FETCH_AND_XOR_8:
6325 case BUILT_IN_FETCH_AND_XOR_16:
6326 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6327 target = expand_builtin_sync_operation (mode, exp, XOR,
6328 false, target, ignore);
6333 case BUILT_IN_FETCH_AND_NAND_1:
6334 case BUILT_IN_FETCH_AND_NAND_2:
6335 case BUILT_IN_FETCH_AND_NAND_4:
6336 case BUILT_IN_FETCH_AND_NAND_8:
6337 case BUILT_IN_FETCH_AND_NAND_16:
6338 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6339 target = expand_builtin_sync_operation (mode, exp, NOT,
6340 false, target, ignore);
6345 case BUILT_IN_ADD_AND_FETCH_1:
6346 case BUILT_IN_ADD_AND_FETCH_2:
6347 case BUILT_IN_ADD_AND_FETCH_4:
6348 case BUILT_IN_ADD_AND_FETCH_8:
6349 case BUILT_IN_ADD_AND_FETCH_16:
6350 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6351 target = expand_builtin_sync_operation (mode, exp, PLUS,
6352 true, target, ignore);
6357 case BUILT_IN_SUB_AND_FETCH_1:
6358 case BUILT_IN_SUB_AND_FETCH_2:
6359 case BUILT_IN_SUB_AND_FETCH_4:
6360 case BUILT_IN_SUB_AND_FETCH_8:
6361 case BUILT_IN_SUB_AND_FETCH_16:
6362 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6363 target = expand_builtin_sync_operation (mode, exp, MINUS,
6364 true, target, ignore);
6369 case BUILT_IN_OR_AND_FETCH_1:
6370 case BUILT_IN_OR_AND_FETCH_2:
6371 case BUILT_IN_OR_AND_FETCH_4:
6372 case BUILT_IN_OR_AND_FETCH_8:
6373 case BUILT_IN_OR_AND_FETCH_16:
6374 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6375 target = expand_builtin_sync_operation (mode, exp, IOR,
6376 true, target, ignore);
6381 case BUILT_IN_AND_AND_FETCH_1:
6382 case BUILT_IN_AND_AND_FETCH_2:
6383 case BUILT_IN_AND_AND_FETCH_4:
6384 case BUILT_IN_AND_AND_FETCH_8:
6385 case BUILT_IN_AND_AND_FETCH_16:
6386 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6387 target = expand_builtin_sync_operation (mode, exp, AND,
6388 true, target, ignore);
6393 case BUILT_IN_XOR_AND_FETCH_1:
6394 case BUILT_IN_XOR_AND_FETCH_2:
6395 case BUILT_IN_XOR_AND_FETCH_4:
6396 case BUILT_IN_XOR_AND_FETCH_8:
6397 case BUILT_IN_XOR_AND_FETCH_16:
6398 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6399 target = expand_builtin_sync_operation (mode, exp, XOR,
6400 true, target, ignore);
6405 case BUILT_IN_NAND_AND_FETCH_1:
6406 case BUILT_IN_NAND_AND_FETCH_2:
6407 case BUILT_IN_NAND_AND_FETCH_4:
6408 case BUILT_IN_NAND_AND_FETCH_8:
6409 case BUILT_IN_NAND_AND_FETCH_16:
6410 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6411 target = expand_builtin_sync_operation (mode, exp, NOT,
6412 true, target, ignore);
6417 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6418 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6419 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6420 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6421 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6422 if (mode == VOIDmode)
6423 mode = TYPE_MODE (boolean_type_node);
6424 if (!target || !register_operand (target, mode))
6425 target = gen_reg_rtx (mode);
6427 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6428 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6433 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6434 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6435 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6436 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6437 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6438 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6439 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6444 case BUILT_IN_LOCK_TEST_AND_SET_1:
6445 case BUILT_IN_LOCK_TEST_AND_SET_2:
6446 case BUILT_IN_LOCK_TEST_AND_SET_4:
6447 case BUILT_IN_LOCK_TEST_AND_SET_8:
6448 case BUILT_IN_LOCK_TEST_AND_SET_16:
6449 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6450 target = expand_builtin_lock_test_and_set (mode, exp, target);
6455 case BUILT_IN_LOCK_RELEASE_1:
6456 case BUILT_IN_LOCK_RELEASE_2:
6457 case BUILT_IN_LOCK_RELEASE_4:
6458 case BUILT_IN_LOCK_RELEASE_8:
6459 case BUILT_IN_LOCK_RELEASE_16:
6460 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6461 expand_builtin_lock_release (mode, exp);
6464 case BUILT_IN_SYNCHRONIZE:
6465 expand_builtin_synchronize ();
6468 case BUILT_IN_OBJECT_SIZE:
6469 return expand_builtin_object_size (exp);
6471 case BUILT_IN_MEMCPY_CHK:
6472 case BUILT_IN_MEMPCPY_CHK:
6473 case BUILT_IN_MEMMOVE_CHK:
6474 case BUILT_IN_MEMSET_CHK:
6475 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6480 case BUILT_IN_STRCPY_CHK:
6481 case BUILT_IN_STPCPY_CHK:
6482 case BUILT_IN_STRNCPY_CHK:
6483 case BUILT_IN_STRCAT_CHK:
6484 case BUILT_IN_STRNCAT_CHK:
6485 case BUILT_IN_SNPRINTF_CHK:
6486 case BUILT_IN_VSNPRINTF_CHK:
6487 maybe_emit_chk_warning (exp, fcode);
6490 case BUILT_IN_SPRINTF_CHK:
6491 case BUILT_IN_VSPRINTF_CHK:
6492 maybe_emit_sprintf_chk_warning (exp, fcode);
6496 maybe_emit_free_warning (exp);
6499 default: /* just do library call, if unknown builtin */
6503 /* The switch statement above can drop through to cause the function
6504 to be called normally. */
6505 return expand_call (exp, target, ignore);
6508 /* Determine whether a tree node represents a call to a built-in
6509 function. If the tree T is a call to a built-in function with
6510 the right number of arguments of the appropriate types, return
6511 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6512 Otherwise the return value is END_BUILTINS. */
6514 enum built_in_function
6515 builtin_mathfn_code (const_tree t)
6517 const_tree fndecl, arg, parmlist;
6518 const_tree argtype, parmtype;
6519 const_call_expr_arg_iterator iter;
6521 if (TREE_CODE (t) != CALL_EXPR
6522 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6523 return END_BUILTINS;
6525 fndecl = get_callee_fndecl (t);
6526 if (fndecl == NULL_TREE
6527 || TREE_CODE (fndecl) != FUNCTION_DECL
6528 || ! DECL_BUILT_IN (fndecl)
6529 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6530 return END_BUILTINS;
6532 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6533 init_const_call_expr_arg_iterator (t, &iter);
6534 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6536 /* If a function doesn't take a variable number of arguments,
6537 the last element in the list will have type `void'. */
6538 parmtype = TREE_VALUE (parmlist);
6539 if (VOID_TYPE_P (parmtype))
6541 if (more_const_call_expr_args_p (&iter))
6542 return END_BUILTINS;
6543 return DECL_FUNCTION_CODE (fndecl);
6546 if (! more_const_call_expr_args_p (&iter))
6547 return END_BUILTINS;
6549 arg = next_const_call_expr_arg (&iter);
6550 argtype = TREE_TYPE (arg);
6552 if (SCALAR_FLOAT_TYPE_P (parmtype))
6554 if (! SCALAR_FLOAT_TYPE_P (argtype))
6555 return END_BUILTINS;
6557 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6559 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6560 return END_BUILTINS;
6562 else if (POINTER_TYPE_P (parmtype))
6564 if (! POINTER_TYPE_P (argtype))
6565 return END_BUILTINS;
6567 else if (INTEGRAL_TYPE_P (parmtype))
6569 if (! INTEGRAL_TYPE_P (argtype))
6570 return END_BUILTINS;
6573 return END_BUILTINS;
6576 /* Variable-length argument list. */
6577 return DECL_FUNCTION_CODE (fndecl);
6580 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6581 evaluate to a constant. */
6584 fold_builtin_constant_p (tree arg)
6586 /* We return 1 for a numeric type that's known to be a constant
6587 value at compile-time or for an aggregate type that's a
6588 literal constant. */
6591 /* If we know this is a constant, emit the constant of one. */
6592 if (CONSTANT_CLASS_P (arg)
6593 || (TREE_CODE (arg) == CONSTRUCTOR
6594 && TREE_CONSTANT (arg)))
6595 return integer_one_node;
6596 if (TREE_CODE (arg) == ADDR_EXPR)
6598 tree op = TREE_OPERAND (arg, 0);
6599 if (TREE_CODE (op) == STRING_CST
6600 || (TREE_CODE (op) == ARRAY_REF
6601 && integer_zerop (TREE_OPERAND (op, 1))
6602 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6603 return integer_one_node;
6606 /* If this expression has side effects, show we don't know it to be a
6607 constant. Likewise if it's a pointer or aggregate type since in
6608 those case we only want literals, since those are only optimized
6609 when generating RTL, not later.
6610 And finally, if we are compiling an initializer, not code, we
6611 need to return a definite result now; there's not going to be any
6612 more optimization done. */
6613 if (TREE_SIDE_EFFECTS (arg)
6614 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6615 || POINTER_TYPE_P (TREE_TYPE (arg))
6617 || folding_initializer)
6618 return integer_zero_node;
6623 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6624 return it as a truthvalue. */
6627 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6629 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6631 fn = built_in_decls[BUILT_IN_EXPECT];
6632 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6633 ret_type = TREE_TYPE (TREE_TYPE (fn));
6634 pred_type = TREE_VALUE (arg_types);
6635 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6637 pred = fold_convert_loc (loc, pred_type, pred);
6638 expected = fold_convert_loc (loc, expected_type, expected);
6639 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6641 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6642 build_int_cst (ret_type, 0));
6645 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6646 NULL_TREE if no simplification is possible. */
6649 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6652 enum tree_code code;
6654 /* If this is a builtin_expect within a builtin_expect keep the
6655 inner one. See through a comparison against a constant. It
6656 might have been added to create a thruthvalue. */
6658 if (COMPARISON_CLASS_P (inner)
6659 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6660 inner = TREE_OPERAND (inner, 0);
6662 if (TREE_CODE (inner) == CALL_EXPR
6663 && (fndecl = get_callee_fndecl (inner))
6664 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6665 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6668 /* Distribute the expected value over short-circuiting operators.
6669 See through the cast from truthvalue_type_node to long. */
6671 while (TREE_CODE (inner) == NOP_EXPR
6672 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6673 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6674 inner = TREE_OPERAND (inner, 0);
6676 code = TREE_CODE (inner);
6677 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6679 tree op0 = TREE_OPERAND (inner, 0);
6680 tree op1 = TREE_OPERAND (inner, 1);
6682 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6683 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6684 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6686 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6689 /* If the argument isn't invariant then there's nothing else we can do. */
6690 if (!TREE_CONSTANT (arg0))
6693 /* If we expect that a comparison against the argument will fold to
6694 a constant return the constant. In practice, this means a true
6695 constant or the address of a non-weak symbol. */
6698 if (TREE_CODE (inner) == ADDR_EXPR)
6702 inner = TREE_OPERAND (inner, 0);
6704 while (TREE_CODE (inner) == COMPONENT_REF
6705 || TREE_CODE (inner) == ARRAY_REF);
6706 if ((TREE_CODE (inner) == VAR_DECL
6707 || TREE_CODE (inner) == FUNCTION_DECL)
6708 && DECL_WEAK (inner))
6712 /* Otherwise, ARG0 already has the proper type for the return value. */
6716 /* Fold a call to __builtin_classify_type with argument ARG. */
6719 fold_builtin_classify_type (tree arg)
6722 return build_int_cst (NULL_TREE, no_type_class);
6724 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6727 /* Fold a call to __builtin_strlen with argument ARG. */
6730 fold_builtin_strlen (location_t loc, tree type, tree arg)
6732 if (!validate_arg (arg, POINTER_TYPE))
6736 tree len = c_strlen (arg, 0);
6739 return fold_convert_loc (loc, type, len);
6745 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6748 fold_builtin_inf (location_t loc, tree type, int warn)
6750 REAL_VALUE_TYPE real;
6752 /* __builtin_inff is intended to be usable to define INFINITY on all
6753 targets. If an infinity is not available, INFINITY expands "to a
6754 positive constant of type float that overflows at translation
6755 time", footnote "In this case, using INFINITY will violate the
6756 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6757 Thus we pedwarn to ensure this constraint violation is
6759 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6760 pedwarn (loc, 0, "target format does not support infinity");
6763 return build_real (type, real);
6766 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6769 fold_builtin_nan (tree arg, tree type, int quiet)
6771 REAL_VALUE_TYPE real;
6774 if (!validate_arg (arg, POINTER_TYPE))
6776 str = c_getstr (arg);
6780 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6783 return build_real (type, real);
6786 /* Return true if the floating point expression T has an integer value.
6787 We also allow +Inf, -Inf and NaN to be considered integer values. */
6790 integer_valued_real_p (tree t)
6792 switch (TREE_CODE (t))
6799 return integer_valued_real_p (TREE_OPERAND (t, 0));
6804 return integer_valued_real_p (TREE_OPERAND (t, 1));
6811 return integer_valued_real_p (TREE_OPERAND (t, 0))
6812 && integer_valued_real_p (TREE_OPERAND (t, 1));
6815 return integer_valued_real_p (TREE_OPERAND (t, 1))
6816 && integer_valued_real_p (TREE_OPERAND (t, 2));
6819 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6823 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6824 if (TREE_CODE (type) == INTEGER_TYPE)
6826 if (TREE_CODE (type) == REAL_TYPE)
6827 return integer_valued_real_p (TREE_OPERAND (t, 0));
6832 switch (builtin_mathfn_code (t))
6834 CASE_FLT_FN (BUILT_IN_CEIL):
6835 CASE_FLT_FN (BUILT_IN_FLOOR):
6836 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6837 CASE_FLT_FN (BUILT_IN_RINT):
6838 CASE_FLT_FN (BUILT_IN_ROUND):
6839 CASE_FLT_FN (BUILT_IN_TRUNC):
6842 CASE_FLT_FN (BUILT_IN_FMIN):
6843 CASE_FLT_FN (BUILT_IN_FMAX):
6844 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6845 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6858 /* FNDECL is assumed to be a builtin where truncation can be propagated
6859 across (for instance floor((double)f) == (double)floorf (f).
6860 Do the transformation for a call with argument ARG. */
6863 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6865 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6867 if (!validate_arg (arg, REAL_TYPE))
6870 /* Integer rounding functions are idempotent. */
6871 if (fcode == builtin_mathfn_code (arg))
6874 /* If argument is already integer valued, and we don't need to worry
6875 about setting errno, there's no need to perform rounding. */
6876 if (! flag_errno_math && integer_valued_real_p (arg))
6881 tree arg0 = strip_float_extensions (arg);
6882 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6883 tree newtype = TREE_TYPE (arg0);
6886 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6887 && (decl = mathfn_built_in (newtype, fcode)))
6888 return fold_convert_loc (loc, ftype,
6889 build_call_expr_loc (loc, decl, 1,
6890 fold_convert_loc (loc,
6897 /* FNDECL is assumed to be builtin which can narrow the FP type of
6898 the argument, for instance lround((double)f) -> lroundf (f).
6899 Do the transformation for a call with argument ARG. */
6902 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6904 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6906 if (!validate_arg (arg, REAL_TYPE))
6909 /* If argument is already integer valued, and we don't need to worry
6910 about setting errno, there's no need to perform rounding. */
6911 if (! flag_errno_math && integer_valued_real_p (arg))
6912 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6913 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6917 tree ftype = TREE_TYPE (arg);
6918 tree arg0 = strip_float_extensions (arg);
6919 tree newtype = TREE_TYPE (arg0);
6922 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6923 && (decl = mathfn_built_in (newtype, fcode)))
6924 return build_call_expr_loc (loc, decl, 1,
6925 fold_convert_loc (loc, newtype, arg0));
6928 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6929 sizeof (long long) == sizeof (long). */
6930 if (TYPE_PRECISION (long_long_integer_type_node)
6931 == TYPE_PRECISION (long_integer_type_node))
6933 tree newfn = NULL_TREE;
6936 CASE_FLT_FN (BUILT_IN_LLCEIL):
6937 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6940 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6941 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6944 CASE_FLT_FN (BUILT_IN_LLROUND):
6945 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6948 CASE_FLT_FN (BUILT_IN_LLRINT):
6949 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6958 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6959 return fold_convert_loc (loc,
6960 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6967 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6968 return type. Return NULL_TREE if no simplification can be made. */
6971 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6975 if (!validate_arg (arg, COMPLEX_TYPE)
6976 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6979 /* Calculate the result when the argument is a constant. */
6980 if (TREE_CODE (arg) == COMPLEX_CST
6981 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6985 if (TREE_CODE (arg) == COMPLEX_EXPR)
6987 tree real = TREE_OPERAND (arg, 0);
6988 tree imag = TREE_OPERAND (arg, 1);
6990 /* If either part is zero, cabs is fabs of the other. */
6991 if (real_zerop (real))
6992 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6993 if (real_zerop (imag))
6994 return fold_build1_loc (loc, ABS_EXPR, type, real);
6996 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6997 if (flag_unsafe_math_optimizations
6998 && operand_equal_p (real, imag, OEP_PURE_SAME))
7000 const REAL_VALUE_TYPE sqrt2_trunc
7001 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7003 return fold_build2_loc (loc, MULT_EXPR, type,
7004 fold_build1_loc (loc, ABS_EXPR, type, real),
7005 build_real (type, sqrt2_trunc));
7009 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7010 if (TREE_CODE (arg) == NEGATE_EXPR
7011 || TREE_CODE (arg) == CONJ_EXPR)
7012 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7014 /* Don't do this when optimizing for size. */
7015 if (flag_unsafe_math_optimizations
7016 && optimize && optimize_function_for_speed_p (cfun))
7018 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7020 if (sqrtfn != NULL_TREE)
7022 tree rpart, ipart, result;
7024 arg = builtin_save_expr (arg);
7026 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7027 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7029 rpart = builtin_save_expr (rpart);
7030 ipart = builtin_save_expr (ipart);
7032 result = fold_build2_loc (loc, PLUS_EXPR, type,
7033 fold_build2_loc (loc, MULT_EXPR, type,
7035 fold_build2_loc (loc, MULT_EXPR, type,
7038 return build_call_expr_loc (loc, sqrtfn, 1, result);
7045 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7046 complex tree type of the result. If NEG is true, the imaginary
7047 zero is negative. */
7050 build_complex_cproj (tree type, bool neg)
7052 REAL_VALUE_TYPE rinf, rzero = dconst0;
7056 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7057 build_real (TREE_TYPE (type), rzero));
7060 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7061 return type. Return NULL_TREE if no simplification can be made. */
7064 fold_builtin_cproj (location_t loc, tree arg, tree type)
7066 if (!validate_arg (arg, COMPLEX_TYPE)
7067 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7070 /* If there are no infinities, return arg. */
7071 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7072 return non_lvalue_loc (loc, arg);
7074 /* Calculate the result when the argument is a constant. */
7075 if (TREE_CODE (arg) == COMPLEX_CST)
7077 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7078 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7080 if (real_isinf (real) || real_isinf (imag))
7081 return build_complex_cproj (type, imag->sign);
7089 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7090 Return NULL_TREE if no simplification can be made. */
7093 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7096 enum built_in_function fcode;
7099 if (!validate_arg (arg, REAL_TYPE))
7102 /* Calculate the result when the argument is a constant. */
7103 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7106 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7107 fcode = builtin_mathfn_code (arg);
7108 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7110 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7111 arg = fold_build2_loc (loc, MULT_EXPR, type,
7112 CALL_EXPR_ARG (arg, 0),
7113 build_real (type, dconsthalf));
7114 return build_call_expr_loc (loc, expfn, 1, arg);
7117 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7118 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7120 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7124 tree arg0 = CALL_EXPR_ARG (arg, 0);
7126 /* The inner root was either sqrt or cbrt. */
7127 /* This was a conditional expression but it triggered a bug
7129 REAL_VALUE_TYPE dconstroot;
7130 if (BUILTIN_SQRT_P (fcode))
7131 dconstroot = dconsthalf;
7133 dconstroot = dconst_third ();
7135 /* Adjust for the outer root. */
7136 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7137 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7138 tree_root = build_real (type, dconstroot);
7139 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7143 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7144 if (flag_unsafe_math_optimizations
7145 && (fcode == BUILT_IN_POW
7146 || fcode == BUILT_IN_POWF
7147 || fcode == BUILT_IN_POWL))
7149 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7150 tree arg0 = CALL_EXPR_ARG (arg, 0);
7151 tree arg1 = CALL_EXPR_ARG (arg, 1);
7153 if (!tree_expr_nonnegative_p (arg0))
7154 arg0 = build1 (ABS_EXPR, type, arg0);
7155 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7156 build_real (type, dconsthalf));
7157 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7163 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7164 Return NULL_TREE if no simplification can be made. */
7167 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7169 const enum built_in_function fcode = builtin_mathfn_code (arg);
7172 if (!validate_arg (arg, REAL_TYPE))
7175 /* Calculate the result when the argument is a constant. */
7176 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7179 if (flag_unsafe_math_optimizations)
7181 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7182 if (BUILTIN_EXPONENT_P (fcode))
7184 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7185 const REAL_VALUE_TYPE third_trunc =
7186 real_value_truncate (TYPE_MODE (type), dconst_third ());
7187 arg = fold_build2_loc (loc, MULT_EXPR, type,
7188 CALL_EXPR_ARG (arg, 0),
7189 build_real (type, third_trunc));
7190 return build_call_expr_loc (loc, expfn, 1, arg);
7193 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7194 if (BUILTIN_SQRT_P (fcode))
7196 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7200 tree arg0 = CALL_EXPR_ARG (arg, 0);
7202 REAL_VALUE_TYPE dconstroot = dconst_third ();
7204 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7205 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7206 tree_root = build_real (type, dconstroot);
7207 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7211 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7212 if (BUILTIN_CBRT_P (fcode))
7214 tree arg0 = CALL_EXPR_ARG (arg, 0);
7215 if (tree_expr_nonnegative_p (arg0))
7217 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7222 REAL_VALUE_TYPE dconstroot;
7224 real_arithmetic (&dconstroot, MULT_EXPR,
7225 dconst_third_ptr (), dconst_third_ptr ());
7226 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7227 tree_root = build_real (type, dconstroot);
7228 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7233 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7234 if (fcode == BUILT_IN_POW
7235 || fcode == BUILT_IN_POWF
7236 || fcode == BUILT_IN_POWL)
7238 tree arg00 = CALL_EXPR_ARG (arg, 0);
7239 tree arg01 = CALL_EXPR_ARG (arg, 1);
7240 if (tree_expr_nonnegative_p (arg00))
7242 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7243 const REAL_VALUE_TYPE dconstroot
7244 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7245 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7246 build_real (type, dconstroot));
7247 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7254 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7255 TYPE is the type of the return value. Return NULL_TREE if no
7256 simplification can be made. */
7259 fold_builtin_cos (location_t loc,
7260 tree arg, tree type, tree fndecl)
7264 if (!validate_arg (arg, REAL_TYPE))
7267 /* Calculate the result when the argument is a constant. */
7268 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7271 /* Optimize cos(-x) into cos (x). */
7272 if ((narg = fold_strip_sign_ops (arg)))
7273 return build_call_expr_loc (loc, fndecl, 1, narg);
7278 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7279 Return NULL_TREE if no simplification can be made. */
7282 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7284 if (validate_arg (arg, REAL_TYPE))
7288 /* Calculate the result when the argument is a constant. */
7289 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7292 /* Optimize cosh(-x) into cosh (x). */
7293 if ((narg = fold_strip_sign_ops (arg)))
7294 return build_call_expr_loc (loc, fndecl, 1, narg);
7300 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7301 argument ARG. TYPE is the type of the return value. Return
7302 NULL_TREE if no simplification can be made. */
7305 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7308 if (validate_arg (arg, COMPLEX_TYPE)
7309 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7313 /* Calculate the result when the argument is a constant. */
7314 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7317 /* Optimize fn(-x) into fn(x). */
7318 if ((tmp = fold_strip_sign_ops (arg)))
7319 return build_call_expr_loc (loc, fndecl, 1, tmp);
7325 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7326 Return NULL_TREE if no simplification can be made. */
7329 fold_builtin_tan (tree arg, tree type)
7331 enum built_in_function fcode;
7334 if (!validate_arg (arg, REAL_TYPE))
7337 /* Calculate the result when the argument is a constant. */
7338 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7341 /* Optimize tan(atan(x)) = x. */
7342 fcode = builtin_mathfn_code (arg);
7343 if (flag_unsafe_math_optimizations
7344 && (fcode == BUILT_IN_ATAN
7345 || fcode == BUILT_IN_ATANF
7346 || fcode == BUILT_IN_ATANL))
7347 return CALL_EXPR_ARG (arg, 0);
7352 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7353 NULL_TREE if no simplification can be made. */
7356 fold_builtin_sincos (location_t loc,
7357 tree arg0, tree arg1, tree arg2)
7362 if (!validate_arg (arg0, REAL_TYPE)
7363 || !validate_arg (arg1, POINTER_TYPE)
7364 || !validate_arg (arg2, POINTER_TYPE))
7367 type = TREE_TYPE (arg0);
7369 /* Calculate the result when the argument is a constant. */
7370 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7373 /* Canonicalize sincos to cexpi. */
7374 if (!TARGET_C99_FUNCTIONS)
7376 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7380 call = build_call_expr_loc (loc, fn, 1, arg0);
7381 call = builtin_save_expr (call);
7383 return build2 (COMPOUND_EXPR, void_type_node,
7384 build2 (MODIFY_EXPR, void_type_node,
7385 build_fold_indirect_ref_loc (loc, arg1),
7386 build1 (IMAGPART_EXPR, type, call)),
7387 build2 (MODIFY_EXPR, void_type_node,
7388 build_fold_indirect_ref_loc (loc, arg2),
7389 build1 (REALPART_EXPR, type, call)));
7392 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7393 NULL_TREE if no simplification can be made. */
7396 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7399 tree realp, imagp, ifn;
7402 if (!validate_arg (arg0, COMPLEX_TYPE)
7403 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7406 /* Calculate the result when the argument is a constant. */
7407 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7410 rtype = TREE_TYPE (TREE_TYPE (arg0));
7412 /* In case we can figure out the real part of arg0 and it is constant zero
7414 if (!TARGET_C99_FUNCTIONS)
7416 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7420 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7421 && real_zerop (realp))
7423 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7424 return build_call_expr_loc (loc, ifn, 1, narg);
7427 /* In case we can easily decompose real and imaginary parts split cexp
7428 to exp (r) * cexpi (i). */
7429 if (flag_unsafe_math_optimizations
7432 tree rfn, rcall, icall;
7434 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7438 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7442 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7443 icall = builtin_save_expr (icall);
7444 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7445 rcall = builtin_save_expr (rcall);
7446 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7447 fold_build2_loc (loc, MULT_EXPR, rtype,
7449 fold_build1_loc (loc, REALPART_EXPR,
7451 fold_build2_loc (loc, MULT_EXPR, rtype,
7453 fold_build1_loc (loc, IMAGPART_EXPR,
7460 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7461 Return NULL_TREE if no simplification can be made. */
7464 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7466 if (!validate_arg (arg, REAL_TYPE))
7469 /* Optimize trunc of constant value. */
7470 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7472 REAL_VALUE_TYPE r, x;
7473 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7475 x = TREE_REAL_CST (arg);
7476 real_trunc (&r, TYPE_MODE (type), &x);
7477 return build_real (type, r);
7480 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7483 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7484 Return NULL_TREE if no simplification can be made. */
7487 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7489 if (!validate_arg (arg, REAL_TYPE))
7492 /* Optimize floor of constant value. */
7493 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7497 x = TREE_REAL_CST (arg);
7498 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7500 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7503 real_floor (&r, TYPE_MODE (type), &x);
7504 return build_real (type, r);
7508 /* Fold floor (x) where x is nonnegative to trunc (x). */
7509 if (tree_expr_nonnegative_p (arg))
7511 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7513 return build_call_expr_loc (loc, truncfn, 1, arg);
7516 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7519 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7520 Return NULL_TREE if no simplification can be made. */
7523 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7525 if (!validate_arg (arg, REAL_TYPE))
7528 /* Optimize ceil of constant value. */
7529 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7533 x = TREE_REAL_CST (arg);
7534 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7536 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7539 real_ceil (&r, TYPE_MODE (type), &x);
7540 return build_real (type, r);
7544 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7547 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7548 Return NULL_TREE if no simplification can be made. */
7551 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7553 if (!validate_arg (arg, REAL_TYPE))
7556 /* Optimize round of constant value. */
7557 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7561 x = TREE_REAL_CST (arg);
7562 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7564 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7567 real_round (&r, TYPE_MODE (type), &x);
7568 return build_real (type, r);
7572 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7575 /* Fold function call to builtin lround, lroundf or lroundl (or the
7576 corresponding long long versions) and other rounding functions. ARG
7577 is the argument to the call. Return NULL_TREE if no simplification
7581 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7583 if (!validate_arg (arg, REAL_TYPE))
7586 /* Optimize lround of constant value. */
7587 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7589 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7591 if (real_isfinite (&x))
7593 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7594 tree ftype = TREE_TYPE (arg);
7595 unsigned HOST_WIDE_INT lo2;
7596 HOST_WIDE_INT hi, lo;
7599 switch (DECL_FUNCTION_CODE (fndecl))
7601 CASE_FLT_FN (BUILT_IN_LFLOOR):
7602 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7603 real_floor (&r, TYPE_MODE (ftype), &x);
7606 CASE_FLT_FN (BUILT_IN_LCEIL):
7607 CASE_FLT_FN (BUILT_IN_LLCEIL):
7608 real_ceil (&r, TYPE_MODE (ftype), &x);
7611 CASE_FLT_FN (BUILT_IN_LROUND):
7612 CASE_FLT_FN (BUILT_IN_LLROUND):
7613 real_round (&r, TYPE_MODE (ftype), &x);
7620 REAL_VALUE_TO_INT (&lo, &hi, r);
7621 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7622 return build_int_cst_wide (itype, lo2, hi);
7626 switch (DECL_FUNCTION_CODE (fndecl))
7628 CASE_FLT_FN (BUILT_IN_LFLOOR):
7629 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7630 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7631 if (tree_expr_nonnegative_p (arg))
7632 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7633 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7638 return fold_fixed_mathfn (loc, fndecl, arg);
7641 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7642 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7643 the argument to the call. Return NULL_TREE if no simplification can
7647 fold_builtin_bitop (tree fndecl, tree arg)
7649 if (!validate_arg (arg, INTEGER_TYPE))
7652 /* Optimize for constant argument. */
7653 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7655 HOST_WIDE_INT hi, width, result;
7656 unsigned HOST_WIDE_INT lo;
7659 type = TREE_TYPE (arg);
7660 width = TYPE_PRECISION (type);
7661 lo = TREE_INT_CST_LOW (arg);
7663 /* Clear all the bits that are beyond the type's precision. */
7664 if (width > HOST_BITS_PER_WIDE_INT)
7666 hi = TREE_INT_CST_HIGH (arg);
7667 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7668 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7673 if (width < HOST_BITS_PER_WIDE_INT)
7674 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7677 switch (DECL_FUNCTION_CODE (fndecl))
7679 CASE_INT_FN (BUILT_IN_FFS):
7681 result = exact_log2 (lo & -lo) + 1;
7683 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7688 CASE_INT_FN (BUILT_IN_CLZ):
7690 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7692 result = width - floor_log2 (lo) - 1;
7693 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7697 CASE_INT_FN (BUILT_IN_CTZ):
7699 result = exact_log2 (lo & -lo);
7701 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7702 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7706 CASE_INT_FN (BUILT_IN_POPCOUNT):
7709 result++, lo &= lo - 1;
7711 result++, hi &= hi - 1;
7714 CASE_INT_FN (BUILT_IN_PARITY):
7717 result++, lo &= lo - 1;
7719 result++, hi &= hi - 1;
7727 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7733 /* Fold function call to builtin_bswap and the long and long long
7734 variants. Return NULL_TREE if no simplification can be made. */
7736 fold_builtin_bswap (tree fndecl, tree arg)
7738 if (! validate_arg (arg, INTEGER_TYPE))
7741 /* Optimize constant value. */
7742 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7744 HOST_WIDE_INT hi, width, r_hi = 0;
7745 unsigned HOST_WIDE_INT lo, r_lo = 0;
7748 type = TREE_TYPE (arg);
7749 width = TYPE_PRECISION (type);
7750 lo = TREE_INT_CST_LOW (arg);
7751 hi = TREE_INT_CST_HIGH (arg);
7753 switch (DECL_FUNCTION_CODE (fndecl))
7755 case BUILT_IN_BSWAP32:
7756 case BUILT_IN_BSWAP64:
7760 for (s = 0; s < width; s += 8)
7762 int d = width - s - 8;
7763 unsigned HOST_WIDE_INT byte;
7765 if (s < HOST_BITS_PER_WIDE_INT)
7766 byte = (lo >> s) & 0xff;
7768 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7770 if (d < HOST_BITS_PER_WIDE_INT)
7773 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7783 if (width < HOST_BITS_PER_WIDE_INT)
7784 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7786 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7792 /* A subroutine of fold_builtin to fold the various logarithmic
7793 functions. Return NULL_TREE if no simplification can me made.
7794 FUNC is the corresponding MPFR logarithm function. */
7797 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7798 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7800 if (validate_arg (arg, REAL_TYPE))
7802 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7804 const enum built_in_function fcode = builtin_mathfn_code (arg);
7806 /* Calculate the result when the argument is a constant. */
7807 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7810 /* Special case, optimize logN(expN(x)) = x. */
7811 if (flag_unsafe_math_optimizations
7812 && ((func == mpfr_log
7813 && (fcode == BUILT_IN_EXP
7814 || fcode == BUILT_IN_EXPF
7815 || fcode == BUILT_IN_EXPL))
7816 || (func == mpfr_log2
7817 && (fcode == BUILT_IN_EXP2
7818 || fcode == BUILT_IN_EXP2F
7819 || fcode == BUILT_IN_EXP2L))
7820 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7821 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7823 /* Optimize logN(func()) for various exponential functions. We
7824 want to determine the value "x" and the power "exponent" in
7825 order to transform logN(x**exponent) into exponent*logN(x). */
7826 if (flag_unsafe_math_optimizations)
7828 tree exponent = 0, x = 0;
7832 CASE_FLT_FN (BUILT_IN_EXP):
7833 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7834 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7836 exponent = CALL_EXPR_ARG (arg, 0);
7838 CASE_FLT_FN (BUILT_IN_EXP2):
7839 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7840 x = build_real (type, dconst2);
7841 exponent = CALL_EXPR_ARG (arg, 0);
7843 CASE_FLT_FN (BUILT_IN_EXP10):
7844 CASE_FLT_FN (BUILT_IN_POW10):
7845 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7847 REAL_VALUE_TYPE dconst10;
7848 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7849 x = build_real (type, dconst10);
7851 exponent = CALL_EXPR_ARG (arg, 0);
7853 CASE_FLT_FN (BUILT_IN_SQRT):
7854 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7855 x = CALL_EXPR_ARG (arg, 0);
7856 exponent = build_real (type, dconsthalf);
7858 CASE_FLT_FN (BUILT_IN_CBRT):
7859 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7860 x = CALL_EXPR_ARG (arg, 0);
7861 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7864 CASE_FLT_FN (BUILT_IN_POW):
7865 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7866 x = CALL_EXPR_ARG (arg, 0);
7867 exponent = CALL_EXPR_ARG (arg, 1);
7873 /* Now perform the optimization. */
7876 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7877 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7885 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7886 NULL_TREE if no simplification can be made. */
7889 fold_builtin_hypot (location_t loc, tree fndecl,
7890 tree arg0, tree arg1, tree type)
7892 tree res, narg0, narg1;
7894 if (!validate_arg (arg0, REAL_TYPE)
7895 || !validate_arg (arg1, REAL_TYPE))
7898 /* Calculate the result when the argument is a constant. */
7899 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7902 /* If either argument to hypot has a negate or abs, strip that off.
7903 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7904 narg0 = fold_strip_sign_ops (arg0);
7905 narg1 = fold_strip_sign_ops (arg1);
7908 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7909 narg1 ? narg1 : arg1);
7912 /* If either argument is zero, hypot is fabs of the other. */
7913 if (real_zerop (arg0))
7914 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7915 else if (real_zerop (arg1))
7916 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7918 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7919 if (flag_unsafe_math_optimizations
7920 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7922 const REAL_VALUE_TYPE sqrt2_trunc
7923 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7924 return fold_build2_loc (loc, MULT_EXPR, type,
7925 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7926 build_real (type, sqrt2_trunc));
7933 /* Fold a builtin function call to pow, powf, or powl. Return
7934 NULL_TREE if no simplification can be made. */
7936 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7940 if (!validate_arg (arg0, REAL_TYPE)
7941 || !validate_arg (arg1, REAL_TYPE))
7944 /* Calculate the result when the argument is a constant. */
7945 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7948 /* Optimize pow(1.0,y) = 1.0. */
7949 if (real_onep (arg0))
7950 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7952 if (TREE_CODE (arg1) == REAL_CST
7953 && !TREE_OVERFLOW (arg1))
7955 REAL_VALUE_TYPE cint;
7959 c = TREE_REAL_CST (arg1);
7961 /* Optimize pow(x,0.0) = 1.0. */
7962 if (REAL_VALUES_EQUAL (c, dconst0))
7963 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7966 /* Optimize pow(x,1.0) = x. */
7967 if (REAL_VALUES_EQUAL (c, dconst1))
7970 /* Optimize pow(x,-1.0) = 1.0/x. */
7971 if (REAL_VALUES_EQUAL (c, dconstm1))
7972 return fold_build2_loc (loc, RDIV_EXPR, type,
7973 build_real (type, dconst1), arg0);
7975 /* Optimize pow(x,0.5) = sqrt(x). */
7976 if (flag_unsafe_math_optimizations
7977 && REAL_VALUES_EQUAL (c, dconsthalf))
7979 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7981 if (sqrtfn != NULL_TREE)
7982 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7985 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7986 if (flag_unsafe_math_optimizations)
7988 const REAL_VALUE_TYPE dconstroot
7989 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7991 if (REAL_VALUES_EQUAL (c, dconstroot))
7993 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7994 if (cbrtfn != NULL_TREE)
7995 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7999 /* Check for an integer exponent. */
8000 n = real_to_integer (&c);
8001 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8002 if (real_identical (&c, &cint))
8004 /* Attempt to evaluate pow at compile-time, unless this should
8005 raise an exception. */
8006 if (TREE_CODE (arg0) == REAL_CST
8007 && !TREE_OVERFLOW (arg0)
8009 || (!flag_trapping_math && !flag_errno_math)
8010 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8015 x = TREE_REAL_CST (arg0);
8016 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8017 if (flag_unsafe_math_optimizations || !inexact)
8018 return build_real (type, x);
8021 /* Strip sign ops from even integer powers. */
8022 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8024 tree narg0 = fold_strip_sign_ops (arg0);
8026 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8031 if (flag_unsafe_math_optimizations)
8033 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8035 /* Optimize pow(expN(x),y) = expN(x*y). */
8036 if (BUILTIN_EXPONENT_P (fcode))
8038 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8039 tree arg = CALL_EXPR_ARG (arg0, 0);
8040 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8041 return build_call_expr_loc (loc, expfn, 1, arg);
8044 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8045 if (BUILTIN_SQRT_P (fcode))
8047 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8048 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8049 build_real (type, dconsthalf));
8050 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8053 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8054 if (BUILTIN_CBRT_P (fcode))
8056 tree arg = CALL_EXPR_ARG (arg0, 0);
8057 if (tree_expr_nonnegative_p (arg))
8059 const REAL_VALUE_TYPE dconstroot
8060 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8061 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8062 build_real (type, dconstroot));
8063 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8067 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8068 if (fcode == BUILT_IN_POW
8069 || fcode == BUILT_IN_POWF
8070 || fcode == BUILT_IN_POWL)
8072 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8073 if (tree_expr_nonnegative_p (arg00))
8075 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8076 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8077 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8085 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8086 Return NULL_TREE if no simplification can be made. */
8088 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8089 tree arg0, tree arg1, tree type)
8091 if (!validate_arg (arg0, REAL_TYPE)
8092 || !validate_arg (arg1, INTEGER_TYPE))
8095 /* Optimize pow(1.0,y) = 1.0. */
8096 if (real_onep (arg0))
8097 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8099 if (host_integerp (arg1, 0))
8101 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8103 /* Evaluate powi at compile-time. */
8104 if (TREE_CODE (arg0) == REAL_CST
8105 && !TREE_OVERFLOW (arg0))
8108 x = TREE_REAL_CST (arg0);
8109 real_powi (&x, TYPE_MODE (type), &x, c);
8110 return build_real (type, x);
8113 /* Optimize pow(x,0) = 1.0. */
8115 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8118 /* Optimize pow(x,1) = x. */
8122 /* Optimize pow(x,-1) = 1.0/x. */
8124 return fold_build2_loc (loc, RDIV_EXPR, type,
8125 build_real (type, dconst1), arg0);
8131 /* A subroutine of fold_builtin to fold the various exponent
8132 functions. Return NULL_TREE if no simplification can be made.
8133 FUNC is the corresponding MPFR exponent function. */
8136 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8137 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8139 if (validate_arg (arg, REAL_TYPE))
8141 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8144 /* Calculate the result when the argument is a constant. */
8145 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8148 /* Optimize expN(logN(x)) = x. */
8149 if (flag_unsafe_math_optimizations)
8151 const enum built_in_function fcode = builtin_mathfn_code (arg);
8153 if ((func == mpfr_exp
8154 && (fcode == BUILT_IN_LOG
8155 || fcode == BUILT_IN_LOGF
8156 || fcode == BUILT_IN_LOGL))
8157 || (func == mpfr_exp2
8158 && (fcode == BUILT_IN_LOG2
8159 || fcode == BUILT_IN_LOG2F
8160 || fcode == BUILT_IN_LOG2L))
8161 || (func == mpfr_exp10
8162 && (fcode == BUILT_IN_LOG10
8163 || fcode == BUILT_IN_LOG10F
8164 || fcode == BUILT_IN_LOG10L)))
8165 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8172 /* Return true if VAR is a VAR_DECL or a component thereof. */
8175 var_decl_component_p (tree var)
8178 while (handled_component_p (inner))
8179 inner = TREE_OPERAND (inner, 0);
8180 return SSA_VAR_P (inner);
8183 /* Fold function call to builtin memset. Return
8184 NULL_TREE if no simplification can be made. */
8187 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8188 tree type, bool ignore)
8190 tree var, ret, etype;
8191 unsigned HOST_WIDE_INT length, cval;
8193 if (! validate_arg (dest, POINTER_TYPE)
8194 || ! validate_arg (c, INTEGER_TYPE)
8195 || ! validate_arg (len, INTEGER_TYPE))
8198 if (! host_integerp (len, 1))
8201 /* If the LEN parameter is zero, return DEST. */
8202 if (integer_zerop (len))
8203 return omit_one_operand_loc (loc, type, dest, c);
8205 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8210 if (TREE_CODE (var) != ADDR_EXPR)
8213 var = TREE_OPERAND (var, 0);
8214 if (TREE_THIS_VOLATILE (var))
8217 etype = TREE_TYPE (var);
8218 if (TREE_CODE (etype) == ARRAY_TYPE)
8219 etype = TREE_TYPE (etype);
8221 if (!INTEGRAL_TYPE_P (etype)
8222 && !POINTER_TYPE_P (etype))
8225 if (! var_decl_component_p (var))
8228 length = tree_low_cst (len, 1);
8229 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8230 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8234 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8237 if (integer_zerop (c))
8241 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8244 cval = tree_low_cst (c, 1);
8248 cval |= (cval << 31) << 1;
8251 ret = build_int_cst_type (etype, cval);
8252 var = build_fold_indirect_ref_loc (loc,
8253 fold_convert_loc (loc,
8254 build_pointer_type (etype),
8256 ret = build2 (MODIFY_EXPR, etype, var, ret);
8260 return omit_one_operand_loc (loc, type, dest, ret);
8263 /* Fold function call to builtin memset. Return
8264 NULL_TREE if no simplification can be made. */
8267 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8269 if (! validate_arg (dest, POINTER_TYPE)
8270 || ! validate_arg (size, INTEGER_TYPE))
8276 /* New argument list transforming bzero(ptr x, int y) to
8277 memset(ptr x, int 0, size_t y). This is done this way
8278 so that if it isn't expanded inline, we fallback to
8279 calling bzero instead of memset. */
8281 return fold_builtin_memset (loc, dest, integer_zero_node,
8282 fold_convert_loc (loc, sizetype, size),
8283 void_type_node, ignore);
8286 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8287 NULL_TREE if no simplification can be made.
8288 If ENDP is 0, return DEST (like memcpy).
8289 If ENDP is 1, return DEST+LEN (like mempcpy).
8290 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8291 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8295 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8296 tree len, tree type, bool ignore, int endp)
8298 tree destvar, srcvar, expr;
8300 if (! validate_arg (dest, POINTER_TYPE)
8301 || ! validate_arg (src, POINTER_TYPE)
8302 || ! validate_arg (len, INTEGER_TYPE))
8305 /* If the LEN parameter is zero, return DEST. */
8306 if (integer_zerop (len))
8307 return omit_one_operand_loc (loc, type, dest, src);
8309 /* If SRC and DEST are the same (and not volatile), return
8310 DEST{,+LEN,+LEN-1}. */
8311 if (operand_equal_p (src, dest, 0))
8315 tree srctype, desttype;
8316 int src_align, dest_align;
8320 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8321 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8323 /* Both DEST and SRC must be pointer types.
8324 ??? This is what old code did. Is the testing for pointer types
8327 If either SRC is readonly or length is 1, we can use memcpy. */
8328 if (!dest_align || !src_align)
8330 if (readonly_data_expr (src)
8331 || (host_integerp (len, 1)
8332 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8333 >= tree_low_cst (len, 1))))
8335 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8338 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8341 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8342 srcvar = build_fold_indirect_ref_loc (loc, src);
8343 destvar = build_fold_indirect_ref_loc (loc, dest);
8345 && !TREE_THIS_VOLATILE (srcvar)
8347 && !TREE_THIS_VOLATILE (destvar))
8349 tree src_base, dest_base, fn;
8350 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8351 HOST_WIDE_INT size = -1;
8352 HOST_WIDE_INT maxsize = -1;
8355 if (handled_component_p (src_base))
8356 src_base = get_ref_base_and_extent (src_base, &src_offset,
8358 dest_base = destvar;
8359 if (handled_component_p (dest_base))
8360 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8362 if (host_integerp (len, 1))
8364 maxsize = tree_low_cst (len, 1);
8366 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8369 maxsize *= BITS_PER_UNIT;
8373 if (SSA_VAR_P (src_base)
8374 && SSA_VAR_P (dest_base))
8376 if (operand_equal_p (src_base, dest_base, 0)
8377 && ranges_overlap_p (src_offset, maxsize,
8378 dest_offset, maxsize))
8381 else if (TREE_CODE (src_base) == INDIRECT_REF
8382 && TREE_CODE (dest_base) == INDIRECT_REF)
8384 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8385 TREE_OPERAND (dest_base, 0), 0)
8386 || ranges_overlap_p (src_offset, maxsize,
8387 dest_offset, maxsize))
8393 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8396 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8401 if (!host_integerp (len, 0))
8404 This logic lose for arguments like (type *)malloc (sizeof (type)),
8405 since we strip the casts of up to VOID return value from malloc.
8406 Perhaps we ought to inherit type from non-VOID argument here? */
8409 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8410 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8412 tree tem = TREE_OPERAND (src, 0);
8414 if (tem != TREE_OPERAND (src, 0))
8415 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8417 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8419 tree tem = TREE_OPERAND (dest, 0);
8421 if (tem != TREE_OPERAND (dest, 0))
8422 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8424 srctype = TREE_TYPE (TREE_TYPE (src));
8426 && TREE_CODE (srctype) == ARRAY_TYPE
8427 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8429 srctype = TREE_TYPE (srctype);
8431 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8433 desttype = TREE_TYPE (TREE_TYPE (dest));
8435 && TREE_CODE (desttype) == ARRAY_TYPE
8436 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8438 desttype = TREE_TYPE (desttype);
8440 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8442 if (!srctype || !desttype
8443 || !TYPE_SIZE_UNIT (srctype)
8444 || !TYPE_SIZE_UNIT (desttype)
8445 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8446 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8447 || TYPE_VOLATILE (srctype)
8448 || TYPE_VOLATILE (desttype))
8451 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8452 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8453 if (dest_align < (int) TYPE_ALIGN (desttype)
8454 || src_align < (int) TYPE_ALIGN (srctype))
8458 dest = builtin_save_expr (dest);
8461 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8463 srcvar = build_fold_indirect_ref_loc (loc, src);
8464 if (TREE_THIS_VOLATILE (srcvar))
8466 else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
8468 /* With memcpy, it is possible to bypass aliasing rules, so without
8469 this check i.e. execute/20060930-2.c would be misoptimized,
8470 because it use conflicting alias set to hold argument for the
8471 memcpy call. This check is probably unnecessary with
8472 -fno-strict-aliasing. Similarly for destvar. See also
8474 else if (!var_decl_component_p (srcvar))
8478 destvar = NULL_TREE;
8479 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8481 destvar = build_fold_indirect_ref_loc (loc, dest);
8482 if (TREE_THIS_VOLATILE (destvar))
8484 else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
8485 destvar = NULL_TREE;
8486 else if (!var_decl_component_p (destvar))
8487 destvar = NULL_TREE;
8490 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8493 if (srcvar == NULL_TREE)
8496 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8499 srctype = build_qualified_type (desttype, 0);
8500 if (src_align < (int) TYPE_ALIGN (srctype))
8502 if (AGGREGATE_TYPE_P (srctype)
8503 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8506 srctype = build_variant_type_copy (srctype);
8507 TYPE_ALIGN (srctype) = src_align;
8508 TYPE_USER_ALIGN (srctype) = 1;
8509 TYPE_PACKED (srctype) = 1;
8511 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8512 src = fold_convert_loc (loc, srcptype, src);
8513 srcvar = build_fold_indirect_ref_loc (loc, src);
8515 else if (destvar == NULL_TREE)
8518 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8521 desttype = build_qualified_type (srctype, 0);
8522 if (dest_align < (int) TYPE_ALIGN (desttype))
8524 if (AGGREGATE_TYPE_P (desttype)
8525 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8528 desttype = build_variant_type_copy (desttype);
8529 TYPE_ALIGN (desttype) = dest_align;
8530 TYPE_USER_ALIGN (desttype) = 1;
8531 TYPE_PACKED (desttype) = 1;
8533 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8534 dest = fold_convert_loc (loc, destptype, dest);
8535 destvar = build_fold_indirect_ref_loc (loc, dest);
8538 if (srctype == desttype
8539 || (gimple_in_ssa_p (cfun)
8540 && useless_type_conversion_p (desttype, srctype)))
8542 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8543 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8544 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8545 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8546 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
8548 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8549 TREE_TYPE (destvar), srcvar);
8550 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8556 if (endp == 0 || endp == 3)
8557 return omit_one_operand_loc (loc, type, dest, expr);
8563 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8566 len = fold_convert_loc (loc, sizetype, len);
8567 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8568 dest = fold_convert_loc (loc, type, dest);
8570 dest = omit_one_operand_loc (loc, type, dest, expr);
8574 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8575 If LEN is not NULL, it represents the length of the string to be
8576 copied. Return NULL_TREE if no simplification can be made. */
8579 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8583 if (!validate_arg (dest, POINTER_TYPE)
8584 || !validate_arg (src, POINTER_TYPE))
8587 /* If SRC and DEST are the same (and not volatile), return DEST. */
8588 if (operand_equal_p (src, dest, 0))
8589 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8591 if (optimize_function_for_size_p (cfun))
8594 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8600 len = c_strlen (src, 1);
8601 if (! len || TREE_SIDE_EFFECTS (len))
8605 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8606 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8607 build_call_expr_loc (loc, fn, 3, dest, src, len));
8610 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8611 Return NULL_TREE if no simplification can be made. */
8614 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8616 tree fn, len, lenp1, call, type;
8618 if (!validate_arg (dest, POINTER_TYPE)
8619 || !validate_arg (src, POINTER_TYPE))
8622 len = c_strlen (src, 1);
8624 || TREE_CODE (len) != INTEGER_CST)
8627 if (optimize_function_for_size_p (cfun)
8628 /* If length is zero it's small enough. */
8629 && !integer_zerop (len))
8632 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8636 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8637 /* We use dest twice in building our expression. Save it from
8638 multiple expansions. */
8639 dest = builtin_save_expr (dest);
8640 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8642 type = TREE_TYPE (TREE_TYPE (fndecl));
8643 len = fold_convert_loc (loc, sizetype, len);
8644 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8645 dest = fold_convert_loc (loc, type, dest);
8646 dest = omit_one_operand_loc (loc, type, dest, call);
8650 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8651 If SLEN is not NULL, it represents the length of the source string.
8652 Return NULL_TREE if no simplification can be made. */
8655 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8656 tree src, tree len, tree slen)
8660 if (!validate_arg (dest, POINTER_TYPE)
8661 || !validate_arg (src, POINTER_TYPE)
8662 || !validate_arg (len, INTEGER_TYPE))
8665 /* If the LEN parameter is zero, return DEST. */
8666 if (integer_zerop (len))
8667 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8669 /* We can't compare slen with len as constants below if len is not a
8671 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8675 slen = c_strlen (src, 1);
8677 /* Now, we must be passed a constant src ptr parameter. */
8678 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8681 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8683 /* We do not support simplification of this case, though we do
8684 support it when expanding trees into RTL. */
8685 /* FIXME: generate a call to __builtin_memset. */
8686 if (tree_int_cst_lt (slen, len))
8689 /* OK transform into builtin memcpy. */
8690 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8693 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8694 build_call_expr_loc (loc, fn, 3, dest, src, len));
8697 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8698 arguments to the call, and TYPE is its return type.
8699 Return NULL_TREE if no simplification can be made. */
8702 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8704 if (!validate_arg (arg1, POINTER_TYPE)
8705 || !validate_arg (arg2, INTEGER_TYPE)
8706 || !validate_arg (len, INTEGER_TYPE))
8712 if (TREE_CODE (arg2) != INTEGER_CST
8713 || !host_integerp (len, 1))
8716 p1 = c_getstr (arg1);
8717 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8723 if (target_char_cast (arg2, &c))
8726 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8729 return build_int_cst (TREE_TYPE (arg1), 0);
8731 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8733 return fold_convert_loc (loc, type, tem);
8739 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8740 Return NULL_TREE if no simplification can be made. */
8743 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8745 const char *p1, *p2;
8747 if (!validate_arg (arg1, POINTER_TYPE)
8748 || !validate_arg (arg2, POINTER_TYPE)
8749 || !validate_arg (len, INTEGER_TYPE))
8752 /* If the LEN parameter is zero, return zero. */
8753 if (integer_zerop (len))
8754 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8757 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8758 if (operand_equal_p (arg1, arg2, 0))
8759 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8761 p1 = c_getstr (arg1);
8762 p2 = c_getstr (arg2);
8764 /* If all arguments are constant, and the value of len is not greater
8765 than the lengths of arg1 and arg2, evaluate at compile-time. */
8766 if (host_integerp (len, 1) && p1 && p2
8767 && compare_tree_int (len, strlen (p1) + 1) <= 0
8768 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8770 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8773 return integer_one_node;
8775 return integer_minus_one_node;
8777 return integer_zero_node;
8780 /* If len parameter is one, return an expression corresponding to
8781 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8782 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8784 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8785 tree cst_uchar_ptr_node
8786 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8789 = fold_convert_loc (loc, integer_type_node,
8790 build1 (INDIRECT_REF, cst_uchar_node,
8791 fold_convert_loc (loc,
8795 = fold_convert_loc (loc, integer_type_node,
8796 build1 (INDIRECT_REF, cst_uchar_node,
8797 fold_convert_loc (loc,
8800 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8806 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8807 Return NULL_TREE if no simplification can be made. */
8810 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8812 const char *p1, *p2;
8814 if (!validate_arg (arg1, POINTER_TYPE)
8815 || !validate_arg (arg2, POINTER_TYPE))
8818 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8819 if (operand_equal_p (arg1, arg2, 0))
8820 return integer_zero_node;
8822 p1 = c_getstr (arg1);
8823 p2 = c_getstr (arg2);
8827 const int i = strcmp (p1, p2);
8829 return integer_minus_one_node;
8831 return integer_one_node;
8833 return integer_zero_node;
8836 /* If the second arg is "", return *(const unsigned char*)arg1. */
8837 if (p2 && *p2 == '\0')
8839 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8840 tree cst_uchar_ptr_node
8841 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8843 return fold_convert_loc (loc, integer_type_node,
8844 build1 (INDIRECT_REF, cst_uchar_node,
8845 fold_convert_loc (loc,
8850 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8851 if (p1 && *p1 == '\0')
8853 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8854 tree cst_uchar_ptr_node
8855 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8858 = fold_convert_loc (loc, integer_type_node,
8859 build1 (INDIRECT_REF, cst_uchar_node,
8860 fold_convert_loc (loc,
8863 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8869 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8870 Return NULL_TREE if no simplification can be made. */
8873 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8875 const char *p1, *p2;
8877 if (!validate_arg (arg1, POINTER_TYPE)
8878 || !validate_arg (arg2, POINTER_TYPE)
8879 || !validate_arg (len, INTEGER_TYPE))
8882 /* If the LEN parameter is zero, return zero. */
8883 if (integer_zerop (len))
8884 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8887 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8888 if (operand_equal_p (arg1, arg2, 0))
8889 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8891 p1 = c_getstr (arg1);
8892 p2 = c_getstr (arg2);
8894 if (host_integerp (len, 1) && p1 && p2)
8896 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8898 return integer_one_node;
8900 return integer_minus_one_node;
8902 return integer_zero_node;
8905 /* If the second arg is "", and the length is greater than zero,
8906 return *(const unsigned char*)arg1. */
8907 if (p2 && *p2 == '\0'
8908 && TREE_CODE (len) == INTEGER_CST
8909 && tree_int_cst_sgn (len) == 1)
8911 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8912 tree cst_uchar_ptr_node
8913 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8915 return fold_convert_loc (loc, integer_type_node,
8916 build1 (INDIRECT_REF, cst_uchar_node,
8917 fold_convert_loc (loc,
8922 /* If the first arg is "", and the length is greater than zero,
8923 return -*(const unsigned char*)arg2. */
8924 if (p1 && *p1 == '\0'
8925 && TREE_CODE (len) == INTEGER_CST
8926 && tree_int_cst_sgn (len) == 1)
8928 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8929 tree cst_uchar_ptr_node
8930 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8932 tree temp = fold_convert_loc (loc, integer_type_node,
8933 build1 (INDIRECT_REF, cst_uchar_node,
8934 fold_convert_loc (loc,
8937 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8940 /* If len parameter is one, return an expression corresponding to
8941 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8942 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8944 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8945 tree cst_uchar_ptr_node
8946 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8948 tree ind1 = fold_convert_loc (loc, integer_type_node,
8949 build1 (INDIRECT_REF, cst_uchar_node,
8950 fold_convert_loc (loc,
8953 tree ind2 = fold_convert_loc (loc, integer_type_node,
8954 build1 (INDIRECT_REF, cst_uchar_node,
8955 fold_convert_loc (loc,
8958 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8964 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8965 ARG. Return NULL_TREE if no simplification can be made. */
8968 fold_builtin_signbit (location_t loc, tree arg, tree type)
8972 if (!validate_arg (arg, REAL_TYPE))
8975 /* If ARG is a compile-time constant, determine the result. */
8976 if (TREE_CODE (arg) == REAL_CST
8977 && !TREE_OVERFLOW (arg))
8981 c = TREE_REAL_CST (arg);
8982 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8983 return fold_convert_loc (loc, type, temp);
8986 /* If ARG is non-negative, the result is always zero. */
8987 if (tree_expr_nonnegative_p (arg))
8988 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8990 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8991 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8992 return fold_build2_loc (loc, LT_EXPR, type, arg,
8993 build_real (TREE_TYPE (arg), dconst0));
8998 /* Fold function call to builtin copysign, copysignf or copysignl with
8999 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9003 fold_builtin_copysign (location_t loc, tree fndecl,
9004 tree arg1, tree arg2, tree type)
9008 if (!validate_arg (arg1, REAL_TYPE)
9009 || !validate_arg (arg2, REAL_TYPE))
9012 /* copysign(X,X) is X. */
9013 if (operand_equal_p (arg1, arg2, 0))
9014 return fold_convert_loc (loc, type, arg1);
9016 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9017 if (TREE_CODE (arg1) == REAL_CST
9018 && TREE_CODE (arg2) == REAL_CST
9019 && !TREE_OVERFLOW (arg1)
9020 && !TREE_OVERFLOW (arg2))
9022 REAL_VALUE_TYPE c1, c2;
9024 c1 = TREE_REAL_CST (arg1);
9025 c2 = TREE_REAL_CST (arg2);
9026 /* c1.sign := c2.sign. */
9027 real_copysign (&c1, &c2);
9028 return build_real (type, c1);
9031 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9032 Remember to evaluate Y for side-effects. */
9033 if (tree_expr_nonnegative_p (arg2))
9034 return omit_one_operand_loc (loc, type,
9035 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9038 /* Strip sign changing operations for the first argument. */
9039 tem = fold_strip_sign_ops (arg1);
9041 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9046 /* Fold a call to builtin isascii with argument ARG. */
9049 fold_builtin_isascii (location_t loc, tree arg)
9051 if (!validate_arg (arg, INTEGER_TYPE))
9055 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9056 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9057 build_int_cst (NULL_TREE,
9058 ~ (unsigned HOST_WIDE_INT) 0x7f));
9059 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9060 arg, integer_zero_node);
9064 /* Fold a call to builtin toascii with argument ARG. */
9067 fold_builtin_toascii (location_t loc, tree arg)
9069 if (!validate_arg (arg, INTEGER_TYPE))
9072 /* Transform toascii(c) -> (c & 0x7f). */
9073 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9074 build_int_cst (NULL_TREE, 0x7f));
9077 /* Fold a call to builtin isdigit with argument ARG. */
9080 fold_builtin_isdigit (location_t loc, tree arg)
9082 if (!validate_arg (arg, INTEGER_TYPE))
9086 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9087 /* According to the C standard, isdigit is unaffected by locale.
9088 However, it definitely is affected by the target character set. */
9089 unsigned HOST_WIDE_INT target_digit0
9090 = lang_hooks.to_target_charset ('0');
9092 if (target_digit0 == 0)
9095 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9096 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9097 build_int_cst (unsigned_type_node, target_digit0));
9098 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9099 build_int_cst (unsigned_type_node, 9));
9103 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9106 fold_builtin_fabs (location_t loc, tree arg, tree type)
9108 if (!validate_arg (arg, REAL_TYPE))
9111 arg = fold_convert_loc (loc, type, arg);
9112 if (TREE_CODE (arg) == REAL_CST)
9113 return fold_abs_const (arg, type);
9114 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9117 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9120 fold_builtin_abs (location_t loc, tree arg, tree type)
9122 if (!validate_arg (arg, INTEGER_TYPE))
9125 arg = fold_convert_loc (loc, type, arg);
9126 if (TREE_CODE (arg) == INTEGER_CST)
9127 return fold_abs_const (arg, type);
9128 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9131 /* Fold a call to builtin fmin or fmax. */
9134 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9135 tree type, bool max)
9137 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9139 /* Calculate the result when the argument is a constant. */
9140 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9145 /* If either argument is NaN, return the other one. Avoid the
9146 transformation if we get (and honor) a signalling NaN. Using
9147 omit_one_operand() ensures we create a non-lvalue. */
9148 if (TREE_CODE (arg0) == REAL_CST
9149 && real_isnan (&TREE_REAL_CST (arg0))
9150 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9151 || ! TREE_REAL_CST (arg0).signalling))
9152 return omit_one_operand_loc (loc, type, arg1, arg0);
9153 if (TREE_CODE (arg1) == REAL_CST
9154 && real_isnan (&TREE_REAL_CST (arg1))
9155 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9156 || ! TREE_REAL_CST (arg1).signalling))
9157 return omit_one_operand_loc (loc, type, arg0, arg1);
9159 /* Transform fmin/fmax(x,x) -> x. */
9160 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9161 return omit_one_operand_loc (loc, type, arg0, arg1);
9163 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9164 functions to return the numeric arg if the other one is NaN.
9165 These tree codes don't honor that, so only transform if
9166 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9167 handled, so we don't have to worry about it either. */
9168 if (flag_finite_math_only)
9169 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9170 fold_convert_loc (loc, type, arg0),
9171 fold_convert_loc (loc, type, arg1));
9176 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9179 fold_builtin_carg (location_t loc, tree arg, tree type)
9181 if (validate_arg (arg, COMPLEX_TYPE)
9182 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9184 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9188 tree new_arg = builtin_save_expr (arg);
9189 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9190 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9191 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9198 /* Fold a call to builtin logb/ilogb. */
9201 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9203 if (! validate_arg (arg, REAL_TYPE))
9208 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9210 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9216 /* If arg is Inf or NaN and we're logb, return it. */
9217 if (TREE_CODE (rettype) == REAL_TYPE)
9218 return fold_convert_loc (loc, rettype, arg);
9219 /* Fall through... */
9221 /* Zero may set errno and/or raise an exception for logb, also
9222 for ilogb we don't know FP_ILOGB0. */
9225 /* For normal numbers, proceed iff radix == 2. In GCC,
9226 normalized significands are in the range [0.5, 1.0). We
9227 want the exponent as if they were [1.0, 2.0) so get the
9228 exponent and subtract 1. */
9229 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9230 return fold_convert_loc (loc, rettype,
9231 build_int_cst (NULL_TREE,
9232 REAL_EXP (value)-1));
9240 /* Fold a call to builtin significand, if radix == 2. */
9243 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9245 if (! validate_arg (arg, REAL_TYPE))
9250 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9252 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9259 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9260 return fold_convert_loc (loc, rettype, arg);
9262 /* For normal numbers, proceed iff radix == 2. */
9263 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9265 REAL_VALUE_TYPE result = *value;
9266 /* In GCC, normalized significands are in the range [0.5,
9267 1.0). We want them to be [1.0, 2.0) so set the
9269 SET_REAL_EXP (&result, 1);
9270 return build_real (rettype, result);
9279 /* Fold a call to builtin frexp, we can assume the base is 2. */
9282 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9284 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9289 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9292 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9294 /* Proceed if a valid pointer type was passed in. */
9295 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9297 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9303 /* For +-0, return (*exp = 0, +-0). */
9304 exp = integer_zero_node;
9309 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9310 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9313 /* Since the frexp function always expects base 2, and in
9314 GCC normalized significands are already in the range
9315 [0.5, 1.0), we have exactly what frexp wants. */
9316 REAL_VALUE_TYPE frac_rvt = *value;
9317 SET_REAL_EXP (&frac_rvt, 0);
9318 frac = build_real (rettype, frac_rvt);
9319 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9326 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9327 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9328 TREE_SIDE_EFFECTS (arg1) = 1;
9329 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9335 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9336 then we can assume the base is two. If it's false, then we have to
9337 check the mode of the TYPE parameter in certain cases. */
9340 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9341 tree type, bool ldexp)
9343 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9348 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9349 if (real_zerop (arg0) || integer_zerop (arg1)
9350 || (TREE_CODE (arg0) == REAL_CST
9351 && !real_isfinite (&TREE_REAL_CST (arg0))))
9352 return omit_one_operand_loc (loc, type, arg0, arg1);
9354 /* If both arguments are constant, then try to evaluate it. */
9355 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9356 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9357 && host_integerp (arg1, 0))
9359 /* Bound the maximum adjustment to twice the range of the
9360 mode's valid exponents. Use abs to ensure the range is
9361 positive as a sanity check. */
9362 const long max_exp_adj = 2 *
9363 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9364 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9366 /* Get the user-requested adjustment. */
9367 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9369 /* The requested adjustment must be inside this range. This
9370 is a preliminary cap to avoid things like overflow, we
9371 may still fail to compute the result for other reasons. */
9372 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9374 REAL_VALUE_TYPE initial_result;
9376 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9378 /* Ensure we didn't overflow. */
9379 if (! real_isinf (&initial_result))
9381 const REAL_VALUE_TYPE trunc_result
9382 = real_value_truncate (TYPE_MODE (type), initial_result);
9384 /* Only proceed if the target mode can hold the
9386 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9387 return build_real (type, trunc_result);
9396 /* Fold a call to builtin modf. */
9399 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9401 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9406 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9409 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9411 /* Proceed if a valid pointer type was passed in. */
9412 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9414 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9415 REAL_VALUE_TYPE trunc, frac;
9421 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9422 trunc = frac = *value;
9425 /* For +-Inf, return (*arg1 = arg0, +-0). */
9427 frac.sign = value->sign;
9431 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9432 real_trunc (&trunc, VOIDmode, value);
9433 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9434 /* If the original number was negative and already
9435 integral, then the fractional part is -0.0. */
9436 if (value->sign && frac.cl == rvc_zero)
9437 frac.sign = value->sign;
9441 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9442 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9443 build_real (rettype, trunc));
9444 TREE_SIDE_EFFECTS (arg1) = 1;
9445 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9446 build_real (rettype, frac));
9452 /* Given a location LOC, an interclass builtin function decl FNDECL
9453 and its single argument ARG, return an folded expression computing
9454 the same, or NULL_TREE if we either couldn't or didn't want to fold
9455 (the latter happen if there's an RTL instruction available). */
9458 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9460 enum machine_mode mode;
9462 if (!validate_arg (arg, REAL_TYPE))
9465 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9468 mode = TYPE_MODE (TREE_TYPE (arg));
9470 /* If there is no optab, try generic code. */
9471 switch (DECL_FUNCTION_CODE (fndecl))
9475 CASE_FLT_FN (BUILT_IN_ISINF):
9477 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9478 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9479 tree const type = TREE_TYPE (arg);
9483 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9484 real_from_string (&r, buf);
9485 result = build_call_expr (isgr_fn, 2,
9486 fold_build1_loc (loc, ABS_EXPR, type, arg),
9487 build_real (type, r));
9490 CASE_FLT_FN (BUILT_IN_FINITE):
9491 case BUILT_IN_ISFINITE:
9493 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9494 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9495 tree const type = TREE_TYPE (arg);
9499 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9500 real_from_string (&r, buf);
9501 result = build_call_expr (isle_fn, 2,
9502 fold_build1_loc (loc, ABS_EXPR, type, arg),
9503 build_real (type, r));
9504 /*result = fold_build2_loc (loc, UNGT_EXPR,
9505 TREE_TYPE (TREE_TYPE (fndecl)),
9506 fold_build1_loc (loc, ABS_EXPR, type, arg),
9507 build_real (type, r));
9508 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9509 TREE_TYPE (TREE_TYPE (fndecl)),
9513 case BUILT_IN_ISNORMAL:
9515 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9516 islessequal(fabs(x),DBL_MAX). */
9517 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9518 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9519 tree const type = TREE_TYPE (arg);
9520 REAL_VALUE_TYPE rmax, rmin;
9523 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9524 real_from_string (&rmax, buf);
9525 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9526 real_from_string (&rmin, buf);
9527 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9528 result = build_call_expr (isle_fn, 2, arg,
9529 build_real (type, rmax));
9530 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9531 build_call_expr (isge_fn, 2, arg,
9532 build_real (type, rmin)));
9542 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9543 ARG is the argument for the call. */
9546 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9548 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9551 if (!validate_arg (arg, REAL_TYPE))
9554 switch (builtin_index)
9556 case BUILT_IN_ISINF:
9557 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9558 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9560 if (TREE_CODE (arg) == REAL_CST)
9562 r = TREE_REAL_CST (arg);
9563 if (real_isinf (&r))
9564 return real_compare (GT_EXPR, &r, &dconst0)
9565 ? integer_one_node : integer_minus_one_node;
9567 return integer_zero_node;
9572 case BUILT_IN_ISINF_SIGN:
9574 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9575 /* In a boolean context, GCC will fold the inner COND_EXPR to
9576 1. So e.g. "if (isinf_sign(x))" would be folded to just
9577 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9578 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9579 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9580 tree tmp = NULL_TREE;
9582 arg = builtin_save_expr (arg);
9584 if (signbit_fn && isinf_fn)
9586 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9587 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9589 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9590 signbit_call, integer_zero_node);
9591 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9592 isinf_call, integer_zero_node);
9594 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9595 integer_minus_one_node, integer_one_node);
9596 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9604 case BUILT_IN_ISFINITE:
9605 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9606 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9607 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9609 if (TREE_CODE (arg) == REAL_CST)
9611 r = TREE_REAL_CST (arg);
9612 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9617 case BUILT_IN_ISNAN:
9618 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9619 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9621 if (TREE_CODE (arg) == REAL_CST)
9623 r = TREE_REAL_CST (arg);
9624 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9627 arg = builtin_save_expr (arg);
9628 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9635 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9636 This builtin will generate code to return the appropriate floating
9637 point classification depending on the value of the floating point
9638 number passed in. The possible return values must be supplied as
9639 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9640 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9641 one floating point argument which is "type generic". */
9644 fold_builtin_fpclassify (location_t loc, tree exp)
9646 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9647 arg, type, res, tmp;
9648 enum machine_mode mode;
9652 /* Verify the required arguments in the original call. */
9653 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9654 INTEGER_TYPE, INTEGER_TYPE,
9655 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9658 fp_nan = CALL_EXPR_ARG (exp, 0);
9659 fp_infinite = CALL_EXPR_ARG (exp, 1);
9660 fp_normal = CALL_EXPR_ARG (exp, 2);
9661 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9662 fp_zero = CALL_EXPR_ARG (exp, 4);
9663 arg = CALL_EXPR_ARG (exp, 5);
9664 type = TREE_TYPE (arg);
9665 mode = TYPE_MODE (type);
9666 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9670 (fabs(x) == Inf ? FP_INFINITE :
9671 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9672 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9674 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9675 build_real (type, dconst0));
9676 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9677 tmp, fp_zero, fp_subnormal);
9679 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9680 real_from_string (&r, buf);
9681 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9682 arg, build_real (type, r));
9683 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9685 if (HONOR_INFINITIES (mode))
9688 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9689 build_real (type, r));
9690 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9694 if (HONOR_NANS (mode))
9696 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9697 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9703 /* Fold a call to an unordered comparison function such as
9704 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9705 being called and ARG0 and ARG1 are the arguments for the call.
9706 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9707 the opposite of the desired result. UNORDERED_CODE is used
9708 for modes that can hold NaNs and ORDERED_CODE is used for
9712 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9713 enum tree_code unordered_code,
9714 enum tree_code ordered_code)
9716 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9717 enum tree_code code;
9719 enum tree_code code0, code1;
9720 tree cmp_type = NULL_TREE;
9722 type0 = TREE_TYPE (arg0);
9723 type1 = TREE_TYPE (arg1);
9725 code0 = TREE_CODE (type0);
9726 code1 = TREE_CODE (type1);
9728 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9729 /* Choose the wider of two real types. */
9730 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9732 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9734 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9737 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9738 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9740 if (unordered_code == UNORDERED_EXPR)
9742 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9743 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9744 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9747 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9749 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9750 fold_build2_loc (loc, code, type, arg0, arg1));
9753 /* Fold a call to built-in function FNDECL with 0 arguments.
9754 IGNORE is true if the result of the function call is ignored. This
9755 function returns NULL_TREE if no simplification was possible. */
9758 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9760 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9761 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9764 CASE_FLT_FN (BUILT_IN_INF):
9765 case BUILT_IN_INFD32:
9766 case BUILT_IN_INFD64:
9767 case BUILT_IN_INFD128:
9768 return fold_builtin_inf (loc, type, true);
9770 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9771 return fold_builtin_inf (loc, type, false);
9773 case BUILT_IN_CLASSIFY_TYPE:
9774 return fold_builtin_classify_type (NULL_TREE);
9782 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9783 IGNORE is true if the result of the function call is ignored. This
9784 function returns NULL_TREE if no simplification was possible. */
9787 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9789 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9790 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9794 case BUILT_IN_CONSTANT_P:
9796 tree val = fold_builtin_constant_p (arg0);
9798 /* Gimplification will pull the CALL_EXPR for the builtin out of
9799 an if condition. When not optimizing, we'll not CSE it back.
9800 To avoid link error types of regressions, return false now. */
9801 if (!val && !optimize)
9802 val = integer_zero_node;
9807 case BUILT_IN_CLASSIFY_TYPE:
9808 return fold_builtin_classify_type (arg0);
9810 case BUILT_IN_STRLEN:
9811 return fold_builtin_strlen (loc, type, arg0);
9813 CASE_FLT_FN (BUILT_IN_FABS):
9814 return fold_builtin_fabs (loc, arg0, type);
9818 case BUILT_IN_LLABS:
9819 case BUILT_IN_IMAXABS:
9820 return fold_builtin_abs (loc, arg0, type);
9822 CASE_FLT_FN (BUILT_IN_CONJ):
9823 if (validate_arg (arg0, COMPLEX_TYPE)
9824 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9825 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9828 CASE_FLT_FN (BUILT_IN_CREAL):
9829 if (validate_arg (arg0, COMPLEX_TYPE)
9830 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9831 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9834 CASE_FLT_FN (BUILT_IN_CIMAG):
9835 if (validate_arg (arg0, COMPLEX_TYPE)
9836 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9837 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9840 CASE_FLT_FN (BUILT_IN_CCOS):
9841 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9843 CASE_FLT_FN (BUILT_IN_CCOSH):
9844 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9846 CASE_FLT_FN (BUILT_IN_CPROJ):
9847 return fold_builtin_cproj(loc, arg0, type);
9849 CASE_FLT_FN (BUILT_IN_CSIN):
9850 if (validate_arg (arg0, COMPLEX_TYPE)
9851 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9852 return do_mpc_arg1 (arg0, type, mpc_sin);
9855 CASE_FLT_FN (BUILT_IN_CSINH):
9856 if (validate_arg (arg0, COMPLEX_TYPE)
9857 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9858 return do_mpc_arg1 (arg0, type, mpc_sinh);
9861 CASE_FLT_FN (BUILT_IN_CTAN):
9862 if (validate_arg (arg0, COMPLEX_TYPE)
9863 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9864 return do_mpc_arg1 (arg0, type, mpc_tan);
9867 CASE_FLT_FN (BUILT_IN_CTANH):
9868 if (validate_arg (arg0, COMPLEX_TYPE)
9869 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9870 return do_mpc_arg1 (arg0, type, mpc_tanh);
9873 CASE_FLT_FN (BUILT_IN_CLOG):
9874 if (validate_arg (arg0, COMPLEX_TYPE)
9875 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9876 return do_mpc_arg1 (arg0, type, mpc_log);
9879 CASE_FLT_FN (BUILT_IN_CSQRT):
9880 if (validate_arg (arg0, COMPLEX_TYPE)
9881 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9882 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9885 CASE_FLT_FN (BUILT_IN_CASIN):
9886 if (validate_arg (arg0, COMPLEX_TYPE)
9887 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9888 return do_mpc_arg1 (arg0, type, mpc_asin);
9891 CASE_FLT_FN (BUILT_IN_CACOS):
9892 if (validate_arg (arg0, COMPLEX_TYPE)
9893 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9894 return do_mpc_arg1 (arg0, type, mpc_acos);
9897 CASE_FLT_FN (BUILT_IN_CATAN):
9898 if (validate_arg (arg0, COMPLEX_TYPE)
9899 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9900 return do_mpc_arg1 (arg0, type, mpc_atan);
9903 CASE_FLT_FN (BUILT_IN_CASINH):
9904 if (validate_arg (arg0, COMPLEX_TYPE)
9905 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9906 return do_mpc_arg1 (arg0, type, mpc_asinh);
9909 CASE_FLT_FN (BUILT_IN_CACOSH):
9910 if (validate_arg (arg0, COMPLEX_TYPE)
9911 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9912 return do_mpc_arg1 (arg0, type, mpc_acosh);
9915 CASE_FLT_FN (BUILT_IN_CATANH):
9916 if (validate_arg (arg0, COMPLEX_TYPE)
9917 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9918 return do_mpc_arg1 (arg0, type, mpc_atanh);
9921 CASE_FLT_FN (BUILT_IN_CABS):
9922 return fold_builtin_cabs (loc, arg0, type, fndecl);
9924 CASE_FLT_FN (BUILT_IN_CARG):
9925 return fold_builtin_carg (loc, arg0, type);
9927 CASE_FLT_FN (BUILT_IN_SQRT):
9928 return fold_builtin_sqrt (loc, arg0, type);
9930 CASE_FLT_FN (BUILT_IN_CBRT):
9931 return fold_builtin_cbrt (loc, arg0, type);
9933 CASE_FLT_FN (BUILT_IN_ASIN):
9934 if (validate_arg (arg0, REAL_TYPE))
9935 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9936 &dconstm1, &dconst1, true);
9939 CASE_FLT_FN (BUILT_IN_ACOS):
9940 if (validate_arg (arg0, REAL_TYPE))
9941 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9942 &dconstm1, &dconst1, true);
9945 CASE_FLT_FN (BUILT_IN_ATAN):
9946 if (validate_arg (arg0, REAL_TYPE))
9947 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9950 CASE_FLT_FN (BUILT_IN_ASINH):
9951 if (validate_arg (arg0, REAL_TYPE))
9952 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9955 CASE_FLT_FN (BUILT_IN_ACOSH):
9956 if (validate_arg (arg0, REAL_TYPE))
9957 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9958 &dconst1, NULL, true);
9961 CASE_FLT_FN (BUILT_IN_ATANH):
9962 if (validate_arg (arg0, REAL_TYPE))
9963 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9964 &dconstm1, &dconst1, false);
9967 CASE_FLT_FN (BUILT_IN_SIN):
9968 if (validate_arg (arg0, REAL_TYPE))
9969 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9972 CASE_FLT_FN (BUILT_IN_COS):
9973 return fold_builtin_cos (loc, arg0, type, fndecl);
9975 CASE_FLT_FN (BUILT_IN_TAN):
9976 return fold_builtin_tan (arg0, type);
9978 CASE_FLT_FN (BUILT_IN_CEXP):
9979 return fold_builtin_cexp (loc, arg0, type);
9981 CASE_FLT_FN (BUILT_IN_CEXPI):
9982 if (validate_arg (arg0, REAL_TYPE))
9983 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9986 CASE_FLT_FN (BUILT_IN_SINH):
9987 if (validate_arg (arg0, REAL_TYPE))
9988 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9991 CASE_FLT_FN (BUILT_IN_COSH):
9992 return fold_builtin_cosh (loc, arg0, type, fndecl);
9994 CASE_FLT_FN (BUILT_IN_TANH):
9995 if (validate_arg (arg0, REAL_TYPE))
9996 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9999 CASE_FLT_FN (BUILT_IN_ERF):
10000 if (validate_arg (arg0, REAL_TYPE))
10001 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10004 CASE_FLT_FN (BUILT_IN_ERFC):
10005 if (validate_arg (arg0, REAL_TYPE))
10006 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10009 CASE_FLT_FN (BUILT_IN_TGAMMA):
10010 if (validate_arg (arg0, REAL_TYPE))
10011 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10014 CASE_FLT_FN (BUILT_IN_EXP):
10015 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10017 CASE_FLT_FN (BUILT_IN_EXP2):
10018 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10020 CASE_FLT_FN (BUILT_IN_EXP10):
10021 CASE_FLT_FN (BUILT_IN_POW10):
10022 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10024 CASE_FLT_FN (BUILT_IN_EXPM1):
10025 if (validate_arg (arg0, REAL_TYPE))
10026 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10029 CASE_FLT_FN (BUILT_IN_LOG):
10030 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10032 CASE_FLT_FN (BUILT_IN_LOG2):
10033 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10035 CASE_FLT_FN (BUILT_IN_LOG10):
10036 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10038 CASE_FLT_FN (BUILT_IN_LOG1P):
10039 if (validate_arg (arg0, REAL_TYPE))
10040 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10041 &dconstm1, NULL, false);
10044 CASE_FLT_FN (BUILT_IN_J0):
10045 if (validate_arg (arg0, REAL_TYPE))
10046 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10050 CASE_FLT_FN (BUILT_IN_J1):
10051 if (validate_arg (arg0, REAL_TYPE))
10052 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10056 CASE_FLT_FN (BUILT_IN_Y0):
10057 if (validate_arg (arg0, REAL_TYPE))
10058 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10059 &dconst0, NULL, false);
10062 CASE_FLT_FN (BUILT_IN_Y1):
10063 if (validate_arg (arg0, REAL_TYPE))
10064 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10065 &dconst0, NULL, false);
10068 CASE_FLT_FN (BUILT_IN_NAN):
10069 case BUILT_IN_NAND32:
10070 case BUILT_IN_NAND64:
10071 case BUILT_IN_NAND128:
10072 return fold_builtin_nan (arg0, type, true);
10074 CASE_FLT_FN (BUILT_IN_NANS):
10075 return fold_builtin_nan (arg0, type, false);
10077 CASE_FLT_FN (BUILT_IN_FLOOR):
10078 return fold_builtin_floor (loc, fndecl, arg0);
10080 CASE_FLT_FN (BUILT_IN_CEIL):
10081 return fold_builtin_ceil (loc, fndecl, arg0);
10083 CASE_FLT_FN (BUILT_IN_TRUNC):
10084 return fold_builtin_trunc (loc, fndecl, arg0);
10086 CASE_FLT_FN (BUILT_IN_ROUND):
10087 return fold_builtin_round (loc, fndecl, arg0);
10089 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10090 CASE_FLT_FN (BUILT_IN_RINT):
10091 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10093 CASE_FLT_FN (BUILT_IN_LCEIL):
10094 CASE_FLT_FN (BUILT_IN_LLCEIL):
10095 CASE_FLT_FN (BUILT_IN_LFLOOR):
10096 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10097 CASE_FLT_FN (BUILT_IN_LROUND):
10098 CASE_FLT_FN (BUILT_IN_LLROUND):
10099 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10101 CASE_FLT_FN (BUILT_IN_LRINT):
10102 CASE_FLT_FN (BUILT_IN_LLRINT):
10103 return fold_fixed_mathfn (loc, fndecl, arg0);
10105 case BUILT_IN_BSWAP32:
10106 case BUILT_IN_BSWAP64:
10107 return fold_builtin_bswap (fndecl, arg0);
10109 CASE_INT_FN (BUILT_IN_FFS):
10110 CASE_INT_FN (BUILT_IN_CLZ):
10111 CASE_INT_FN (BUILT_IN_CTZ):
10112 CASE_INT_FN (BUILT_IN_POPCOUNT):
10113 CASE_INT_FN (BUILT_IN_PARITY):
10114 return fold_builtin_bitop (fndecl, arg0);
10116 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10117 return fold_builtin_signbit (loc, arg0, type);
10119 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10120 return fold_builtin_significand (loc, arg0, type);
10122 CASE_FLT_FN (BUILT_IN_ILOGB):
10123 CASE_FLT_FN (BUILT_IN_LOGB):
10124 return fold_builtin_logb (loc, arg0, type);
10126 case BUILT_IN_ISASCII:
10127 return fold_builtin_isascii (loc, arg0);
10129 case BUILT_IN_TOASCII:
10130 return fold_builtin_toascii (loc, arg0);
10132 case BUILT_IN_ISDIGIT:
10133 return fold_builtin_isdigit (loc, arg0);
10135 CASE_FLT_FN (BUILT_IN_FINITE):
10136 case BUILT_IN_FINITED32:
10137 case BUILT_IN_FINITED64:
10138 case BUILT_IN_FINITED128:
10139 case BUILT_IN_ISFINITE:
10141 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10144 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10147 CASE_FLT_FN (BUILT_IN_ISINF):
10148 case BUILT_IN_ISINFD32:
10149 case BUILT_IN_ISINFD64:
10150 case BUILT_IN_ISINFD128:
10152 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10155 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10158 case BUILT_IN_ISNORMAL:
10159 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10161 case BUILT_IN_ISINF_SIGN:
10162 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10164 CASE_FLT_FN (BUILT_IN_ISNAN):
10165 case BUILT_IN_ISNAND32:
10166 case BUILT_IN_ISNAND64:
10167 case BUILT_IN_ISNAND128:
10168 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10170 case BUILT_IN_PRINTF:
10171 case BUILT_IN_PRINTF_UNLOCKED:
10172 case BUILT_IN_VPRINTF:
10173 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10183 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10184 IGNORE is true if the result of the function call is ignored. This
10185 function returns NULL_TREE if no simplification was possible. */
10188 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10190 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10191 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10195 CASE_FLT_FN (BUILT_IN_JN):
10196 if (validate_arg (arg0, INTEGER_TYPE)
10197 && validate_arg (arg1, REAL_TYPE))
10198 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10201 CASE_FLT_FN (BUILT_IN_YN):
10202 if (validate_arg (arg0, INTEGER_TYPE)
10203 && validate_arg (arg1, REAL_TYPE))
10204 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10208 CASE_FLT_FN (BUILT_IN_DREM):
10209 CASE_FLT_FN (BUILT_IN_REMAINDER):
10210 if (validate_arg (arg0, REAL_TYPE)
10211 && validate_arg(arg1, REAL_TYPE))
10212 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10215 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10216 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10217 if (validate_arg (arg0, REAL_TYPE)
10218 && validate_arg(arg1, POINTER_TYPE))
10219 return do_mpfr_lgamma_r (arg0, arg1, type);
10222 CASE_FLT_FN (BUILT_IN_ATAN2):
10223 if (validate_arg (arg0, REAL_TYPE)
10224 && validate_arg(arg1, REAL_TYPE))
10225 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10228 CASE_FLT_FN (BUILT_IN_FDIM):
10229 if (validate_arg (arg0, REAL_TYPE)
10230 && validate_arg(arg1, REAL_TYPE))
10231 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10234 CASE_FLT_FN (BUILT_IN_HYPOT):
10235 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10237 CASE_FLT_FN (BUILT_IN_CPOW):
10238 if (validate_arg (arg0, COMPLEX_TYPE)
10239 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10240 && validate_arg (arg1, COMPLEX_TYPE)
10241 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10242 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10245 CASE_FLT_FN (BUILT_IN_LDEXP):
10246 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10247 CASE_FLT_FN (BUILT_IN_SCALBN):
10248 CASE_FLT_FN (BUILT_IN_SCALBLN):
10249 return fold_builtin_load_exponent (loc, arg0, arg1,
10250 type, /*ldexp=*/false);
10252 CASE_FLT_FN (BUILT_IN_FREXP):
10253 return fold_builtin_frexp (loc, arg0, arg1, type);
10255 CASE_FLT_FN (BUILT_IN_MODF):
10256 return fold_builtin_modf (loc, arg0, arg1, type);
10258 case BUILT_IN_BZERO:
10259 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10261 case BUILT_IN_FPUTS:
10262 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10264 case BUILT_IN_FPUTS_UNLOCKED:
10265 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10267 case BUILT_IN_STRSTR:
10268 return fold_builtin_strstr (loc, arg0, arg1, type);
10270 case BUILT_IN_STRCAT:
10271 return fold_builtin_strcat (loc, arg0, arg1);
10273 case BUILT_IN_STRSPN:
10274 return fold_builtin_strspn (loc, arg0, arg1);
10276 case BUILT_IN_STRCSPN:
10277 return fold_builtin_strcspn (loc, arg0, arg1);
10279 case BUILT_IN_STRCHR:
10280 case BUILT_IN_INDEX:
10281 return fold_builtin_strchr (loc, arg0, arg1, type);
10283 case BUILT_IN_STRRCHR:
10284 case BUILT_IN_RINDEX:
10285 return fold_builtin_strrchr (loc, arg0, arg1, type);
10287 case BUILT_IN_STRCPY:
10288 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10290 case BUILT_IN_STPCPY:
10293 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10297 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10300 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10303 case BUILT_IN_STRCMP:
10304 return fold_builtin_strcmp (loc, arg0, arg1);
10306 case BUILT_IN_STRPBRK:
10307 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10309 case BUILT_IN_EXPECT:
10310 return fold_builtin_expect (loc, arg0, arg1);
10312 CASE_FLT_FN (BUILT_IN_POW):
10313 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10315 CASE_FLT_FN (BUILT_IN_POWI):
10316 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10318 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10319 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10321 CASE_FLT_FN (BUILT_IN_FMIN):
10322 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10324 CASE_FLT_FN (BUILT_IN_FMAX):
10325 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10327 case BUILT_IN_ISGREATER:
10328 return fold_builtin_unordered_cmp (loc, fndecl,
10329 arg0, arg1, UNLE_EXPR, LE_EXPR);
10330 case BUILT_IN_ISGREATEREQUAL:
10331 return fold_builtin_unordered_cmp (loc, fndecl,
10332 arg0, arg1, UNLT_EXPR, LT_EXPR);
10333 case BUILT_IN_ISLESS:
10334 return fold_builtin_unordered_cmp (loc, fndecl,
10335 arg0, arg1, UNGE_EXPR, GE_EXPR);
10336 case BUILT_IN_ISLESSEQUAL:
10337 return fold_builtin_unordered_cmp (loc, fndecl,
10338 arg0, arg1, UNGT_EXPR, GT_EXPR);
10339 case BUILT_IN_ISLESSGREATER:
10340 return fold_builtin_unordered_cmp (loc, fndecl,
10341 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10342 case BUILT_IN_ISUNORDERED:
10343 return fold_builtin_unordered_cmp (loc, fndecl,
10344 arg0, arg1, UNORDERED_EXPR,
10347 /* We do the folding for va_start in the expander. */
10348 case BUILT_IN_VA_START:
10351 case BUILT_IN_SPRINTF:
10352 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10354 case BUILT_IN_OBJECT_SIZE:
10355 return fold_builtin_object_size (arg0, arg1);
10357 case BUILT_IN_PRINTF:
10358 case BUILT_IN_PRINTF_UNLOCKED:
10359 case BUILT_IN_VPRINTF:
10360 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10362 case BUILT_IN_PRINTF_CHK:
10363 case BUILT_IN_VPRINTF_CHK:
10364 if (!validate_arg (arg0, INTEGER_TYPE)
10365 || TREE_SIDE_EFFECTS (arg0))
10368 return fold_builtin_printf (loc, fndecl,
10369 arg1, NULL_TREE, ignore, fcode);
10372 case BUILT_IN_FPRINTF:
10373 case BUILT_IN_FPRINTF_UNLOCKED:
10374 case BUILT_IN_VFPRINTF:
10375 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10384 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10385 and ARG2. IGNORE is true if the result of the function call is ignored.
10386 This function returns NULL_TREE if no simplification was possible. */
10389 fold_builtin_3 (location_t loc, tree fndecl,
10390 tree arg0, tree arg1, tree arg2, bool ignore)
10392 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10393 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10397 CASE_FLT_FN (BUILT_IN_SINCOS):
10398 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10400 CASE_FLT_FN (BUILT_IN_FMA):
10401 if (validate_arg (arg0, REAL_TYPE)
10402 && validate_arg(arg1, REAL_TYPE)
10403 && validate_arg(arg2, REAL_TYPE))
10404 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10407 CASE_FLT_FN (BUILT_IN_REMQUO):
10408 if (validate_arg (arg0, REAL_TYPE)
10409 && validate_arg(arg1, REAL_TYPE)
10410 && validate_arg(arg2, POINTER_TYPE))
10411 return do_mpfr_remquo (arg0, arg1, arg2);
10414 case BUILT_IN_MEMSET:
10415 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10417 case BUILT_IN_BCOPY:
10418 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10419 void_type_node, true, /*endp=*/3);
10421 case BUILT_IN_MEMCPY:
10422 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10423 type, ignore, /*endp=*/0);
10425 case BUILT_IN_MEMPCPY:
10426 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10427 type, ignore, /*endp=*/1);
10429 case BUILT_IN_MEMMOVE:
10430 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10431 type, ignore, /*endp=*/3);
10433 case BUILT_IN_STRNCAT:
10434 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10436 case BUILT_IN_STRNCPY:
10437 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10439 case BUILT_IN_STRNCMP:
10440 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10442 case BUILT_IN_MEMCHR:
10443 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10445 case BUILT_IN_BCMP:
10446 case BUILT_IN_MEMCMP:
10447 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10449 case BUILT_IN_SPRINTF:
10450 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10452 case BUILT_IN_STRCPY_CHK:
10453 case BUILT_IN_STPCPY_CHK:
10454 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10457 case BUILT_IN_STRCAT_CHK:
10458 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10460 case BUILT_IN_PRINTF_CHK:
10461 case BUILT_IN_VPRINTF_CHK:
10462 if (!validate_arg (arg0, INTEGER_TYPE)
10463 || TREE_SIDE_EFFECTS (arg0))
10466 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10469 case BUILT_IN_FPRINTF:
10470 case BUILT_IN_FPRINTF_UNLOCKED:
10471 case BUILT_IN_VFPRINTF:
10472 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10475 case BUILT_IN_FPRINTF_CHK:
10476 case BUILT_IN_VFPRINTF_CHK:
10477 if (!validate_arg (arg1, INTEGER_TYPE)
10478 || TREE_SIDE_EFFECTS (arg1))
10481 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10490 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10491 ARG2, and ARG3. IGNORE is true if the result of the function call is
10492 ignored. This function returns NULL_TREE if no simplification was
10496 fold_builtin_4 (location_t loc, tree fndecl,
10497 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10499 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10503 case BUILT_IN_MEMCPY_CHK:
10504 case BUILT_IN_MEMPCPY_CHK:
10505 case BUILT_IN_MEMMOVE_CHK:
10506 case BUILT_IN_MEMSET_CHK:
10507 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10509 DECL_FUNCTION_CODE (fndecl));
10511 case BUILT_IN_STRNCPY_CHK:
10512 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10514 case BUILT_IN_STRNCAT_CHK:
10515 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10517 case BUILT_IN_FPRINTF_CHK:
10518 case BUILT_IN_VFPRINTF_CHK:
10519 if (!validate_arg (arg1, INTEGER_TYPE)
10520 || TREE_SIDE_EFFECTS (arg1))
10523 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10533 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10534 arguments, where NARGS <= 4. IGNORE is true if the result of the
10535 function call is ignored. This function returns NULL_TREE if no
10536 simplification was possible. Note that this only folds builtins with
10537 fixed argument patterns. Foldings that do varargs-to-varargs
10538 transformations, or that match calls with more than 4 arguments,
10539 need to be handled with fold_builtin_varargs instead. */
10541 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10544 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10546 tree ret = NULL_TREE;
10551 ret = fold_builtin_0 (loc, fndecl, ignore);
10554 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10557 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10560 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10563 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10571 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10572 SET_EXPR_LOCATION (ret, loc);
10573 TREE_NO_WARNING (ret) = 1;
10579 /* Builtins with folding operations that operate on "..." arguments
10580 need special handling; we need to store the arguments in a convenient
10581 data structure before attempting any folding. Fortunately there are
10582 only a few builtins that fall into this category. FNDECL is the
10583 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10584 result of the function call is ignored. */
10587 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10588 bool ignore ATTRIBUTE_UNUSED)
10590 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10591 tree ret = NULL_TREE;
10595 case BUILT_IN_SPRINTF_CHK:
10596 case BUILT_IN_VSPRINTF_CHK:
10597 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10600 case BUILT_IN_SNPRINTF_CHK:
10601 case BUILT_IN_VSNPRINTF_CHK:
10602 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10605 case BUILT_IN_FPCLASSIFY:
10606 ret = fold_builtin_fpclassify (loc, exp);
10614 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10615 SET_EXPR_LOCATION (ret, loc);
10616 TREE_NO_WARNING (ret) = 1;
10622 /* Return true if FNDECL shouldn't be folded right now.
10623 If a built-in function has an inline attribute always_inline
10624 wrapper, defer folding it after always_inline functions have
10625 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10626 might not be performed. */
10629 avoid_folding_inline_builtin (tree fndecl)
10631 return (DECL_DECLARED_INLINE_P (fndecl)
10632 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10634 && !cfun->always_inline_functions_inlined
10635 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10638 /* A wrapper function for builtin folding that prevents warnings for
10639 "statement without effect" and the like, caused by removing the
10640 call node earlier than the warning is generated. */
10643 fold_call_expr (location_t loc, tree exp, bool ignore)
10645 tree ret = NULL_TREE;
10646 tree fndecl = get_callee_fndecl (exp);
10648 && TREE_CODE (fndecl) == FUNCTION_DECL
10649 && DECL_BUILT_IN (fndecl)
10650 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10651 yet. Defer folding until we see all the arguments
10652 (after inlining). */
10653 && !CALL_EXPR_VA_ARG_PACK (exp))
10655 int nargs = call_expr_nargs (exp);
10657 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10658 instead last argument is __builtin_va_arg_pack (). Defer folding
10659 even in that case, until arguments are finalized. */
10660 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10662 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10664 && TREE_CODE (fndecl2) == FUNCTION_DECL
10665 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10666 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10670 if (avoid_folding_inline_builtin (fndecl))
10673 /* FIXME: Don't use a list in this interface. */
10674 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10675 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10678 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10680 tree *args = CALL_EXPR_ARGP (exp);
10681 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10684 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10692 /* Conveniently construct a function call expression. FNDECL names the
10693 function to be called and ARGLIST is a TREE_LIST of arguments. */
10696 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10698 tree fntype = TREE_TYPE (fndecl);
10699 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10700 int n = list_length (arglist);
10701 tree *argarray = (tree *) alloca (n * sizeof (tree));
10704 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10705 argarray[i] = TREE_VALUE (arglist);
10706 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10709 /* Conveniently construct a function call expression. FNDECL names the
10710 function to be called, N is the number of arguments, and the "..."
10711 parameters are the argument expressions. */
10714 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10717 tree fntype = TREE_TYPE (fndecl);
10718 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10719 tree *argarray = (tree *) alloca (n * sizeof (tree));
10723 for (i = 0; i < n; i++)
10724 argarray[i] = va_arg (ap, tree);
10726 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10729 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10730 N arguments are passed in the array ARGARRAY. */
10733 fold_builtin_call_array (location_t loc, tree type,
10738 tree ret = NULL_TREE;
10742 if (TREE_CODE (fn) == ADDR_EXPR)
10744 tree fndecl = TREE_OPERAND (fn, 0);
10745 if (TREE_CODE (fndecl) == FUNCTION_DECL
10746 && DECL_BUILT_IN (fndecl))
10748 /* If last argument is __builtin_va_arg_pack (), arguments to this
10749 function are not finalized yet. Defer folding until they are. */
10750 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10752 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10754 && TREE_CODE (fndecl2) == FUNCTION_DECL
10755 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10756 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10757 return build_call_array_loc (loc, type, fn, n, argarray);
10759 if (avoid_folding_inline_builtin (fndecl))
10760 return build_call_array_loc (loc, type, fn, n, argarray);
10761 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10763 tree arglist = NULL_TREE;
10764 for (i = n - 1; i >= 0; i--)
10765 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10766 ret = targetm.fold_builtin (fndecl, arglist, false);
10769 return build_call_array_loc (loc, type, fn, n, argarray);
10771 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10773 /* First try the transformations that don't require consing up
10775 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10780 /* If we got this far, we need to build an exp. */
10781 exp = build_call_array_loc (loc, type, fn, n, argarray);
10782 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10783 return ret ? ret : exp;
10787 return build_call_array_loc (loc, type, fn, n, argarray);
10790 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10791 along with N new arguments specified as the "..." parameters. SKIP
10792 is the number of arguments in EXP to be omitted. This function is used
10793 to do varargs-to-varargs transformations. */
10796 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10798 int oldnargs = call_expr_nargs (exp);
10799 int nargs = oldnargs - skip + n;
10800 tree fntype = TREE_TYPE (fndecl);
10801 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10809 buffer = XALLOCAVEC (tree, nargs);
10811 for (i = 0; i < n; i++)
10812 buffer[i] = va_arg (ap, tree);
10814 for (j = skip; j < oldnargs; j++, i++)
10815 buffer[i] = CALL_EXPR_ARG (exp, j);
10818 buffer = CALL_EXPR_ARGP (exp) + skip;
10820 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
10823 /* Validate a single argument ARG against a tree code CODE representing
10827 validate_arg (const_tree arg, enum tree_code code)
10831 else if (code == POINTER_TYPE)
10832 return POINTER_TYPE_P (TREE_TYPE (arg));
10833 else if (code == INTEGER_TYPE)
10834 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10835 return code == TREE_CODE (TREE_TYPE (arg));
10838 /* This function validates the types of a function call argument list
10839 against a specified list of tree_codes. If the last specifier is a 0,
10840 that represents an ellipses, otherwise the last specifier must be a
10843 This is the GIMPLE version of validate_arglist. Eventually we want to
10844 completely convert builtins.c to work from GIMPLEs and the tree based
10845 validate_arglist will then be removed. */
10848 validate_gimple_arglist (const_gimple call, ...)
10850 enum tree_code code;
10856 va_start (ap, call);
10861 code = (enum tree_code) va_arg (ap, int);
10865 /* This signifies an ellipses, any further arguments are all ok. */
10869 /* This signifies an endlink, if no arguments remain, return
10870 true, otherwise return false. */
10871 res = (i == gimple_call_num_args (call));
10874 /* If no parameters remain or the parameter's code does not
10875 match the specified code, return false. Otherwise continue
10876 checking any remaining arguments. */
10877 arg = gimple_call_arg (call, i++);
10878 if (!validate_arg (arg, code))
10885 /* We need gotos here since we can only have one VA_CLOSE in a
10893 /* This function validates the types of a function call argument list
10894 against a specified list of tree_codes. If the last specifier is a 0,
10895 that represents an ellipses, otherwise the last specifier must be a
10899 validate_arglist (const_tree callexpr, ...)
10901 enum tree_code code;
10904 const_call_expr_arg_iterator iter;
10907 va_start (ap, callexpr);
10908 init_const_call_expr_arg_iterator (callexpr, &iter);
10912 code = (enum tree_code) va_arg (ap, int);
10916 /* This signifies an ellipses, any further arguments are all ok. */
10920 /* This signifies an endlink, if no arguments remain, return
10921 true, otherwise return false. */
10922 res = !more_const_call_expr_args_p (&iter);
10925 /* If no parameters remain or the parameter's code does not
10926 match the specified code, return false. Otherwise continue
10927 checking any remaining arguments. */
10928 arg = next_const_call_expr_arg (&iter);
10929 if (!validate_arg (arg, code))
10936 /* We need gotos here since we can only have one VA_CLOSE in a
10944 /* Default target-specific builtin expander that does nothing. */
10947 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10948 rtx target ATTRIBUTE_UNUSED,
10949 rtx subtarget ATTRIBUTE_UNUSED,
10950 enum machine_mode mode ATTRIBUTE_UNUSED,
10951 int ignore ATTRIBUTE_UNUSED)
10956 /* Returns true is EXP represents data that would potentially reside
10957 in a readonly section. */
10960 readonly_data_expr (tree exp)
10964 if (TREE_CODE (exp) != ADDR_EXPR)
10967 exp = get_base_address (TREE_OPERAND (exp, 0));
10971 /* Make sure we call decl_readonly_section only for trees it
10972 can handle (since it returns true for everything it doesn't
10974 if (TREE_CODE (exp) == STRING_CST
10975 || TREE_CODE (exp) == CONSTRUCTOR
10976 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10977 return decl_readonly_section (exp, 0);
10982 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10983 to the call, and TYPE is its return type.
10985 Return NULL_TREE if no simplification was possible, otherwise return the
10986 simplified form of the call as a tree.
10988 The simplified form may be a constant or other expression which
10989 computes the same value, but in a more efficient manner (including
10990 calls to other builtin functions).
10992 The call may contain arguments which need to be evaluated, but
10993 which are not useful to determine the result of the call. In
10994 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10995 COMPOUND_EXPR will be an argument which must be evaluated.
10996 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10997 COMPOUND_EXPR in the chain will contain the tree for the simplified
10998 form of the builtin function call. */
11001 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11003 if (!validate_arg (s1, POINTER_TYPE)
11004 || !validate_arg (s2, POINTER_TYPE))
11009 const char *p1, *p2;
11011 p2 = c_getstr (s2);
11015 p1 = c_getstr (s1);
11018 const char *r = strstr (p1, p2);
11022 return build_int_cst (TREE_TYPE (s1), 0);
11024 /* Return an offset into the constant string argument. */
11025 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11026 s1, size_int (r - p1));
11027 return fold_convert_loc (loc, type, tem);
11030 /* The argument is const char *, and the result is char *, so we need
11031 a type conversion here to avoid a warning. */
11033 return fold_convert_loc (loc, type, s1);
11038 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11042 /* New argument list transforming strstr(s1, s2) to
11043 strchr(s1, s2[0]). */
11044 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11048 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11049 the call, and TYPE is its return type.
11051 Return NULL_TREE if no simplification was possible, otherwise return the
11052 simplified form of the call as a tree.
11054 The simplified form may be a constant or other expression which
11055 computes the same value, but in a more efficient manner (including
11056 calls to other builtin functions).
11058 The call may contain arguments which need to be evaluated, but
11059 which are not useful to determine the result of the call. In
11060 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11061 COMPOUND_EXPR will be an argument which must be evaluated.
11062 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11063 COMPOUND_EXPR in the chain will contain the tree for the simplified
11064 form of the builtin function call. */
11067 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11069 if (!validate_arg (s1, POINTER_TYPE)
11070 || !validate_arg (s2, INTEGER_TYPE))
11076 if (TREE_CODE (s2) != INTEGER_CST)
11079 p1 = c_getstr (s1);
11086 if (target_char_cast (s2, &c))
11089 r = strchr (p1, c);
11092 return build_int_cst (TREE_TYPE (s1), 0);
11094 /* Return an offset into the constant string argument. */
11095 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11096 s1, size_int (r - p1));
11097 return fold_convert_loc (loc, type, tem);
11103 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11104 the call, and TYPE is its return type.
11106 Return NULL_TREE if no simplification was possible, otherwise return the
11107 simplified form of the call as a tree.
11109 The simplified form may be a constant or other expression which
11110 computes the same value, but in a more efficient manner (including
11111 calls to other builtin functions).
11113 The call may contain arguments which need to be evaluated, but
11114 which are not useful to determine the result of the call. In
11115 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11116 COMPOUND_EXPR will be an argument which must be evaluated.
11117 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11118 COMPOUND_EXPR in the chain will contain the tree for the simplified
11119 form of the builtin function call. */
11122 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11124 if (!validate_arg (s1, POINTER_TYPE)
11125 || !validate_arg (s2, INTEGER_TYPE))
11132 if (TREE_CODE (s2) != INTEGER_CST)
11135 p1 = c_getstr (s1);
11142 if (target_char_cast (s2, &c))
11145 r = strrchr (p1, c);
11148 return build_int_cst (TREE_TYPE (s1), 0);
11150 /* Return an offset into the constant string argument. */
11151 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11152 s1, size_int (r - p1));
11153 return fold_convert_loc (loc, type, tem);
11156 if (! integer_zerop (s2))
11159 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11163 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11164 return build_call_expr_loc (loc, fn, 2, s1, s2);
11168 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11169 to the call, and TYPE is its return type.
11171 Return NULL_TREE if no simplification was possible, otherwise return the
11172 simplified form of the call as a tree.
11174 The simplified form may be a constant or other expression which
11175 computes the same value, but in a more efficient manner (including
11176 calls to other builtin functions).
11178 The call may contain arguments which need to be evaluated, but
11179 which are not useful to determine the result of the call. In
11180 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11181 COMPOUND_EXPR will be an argument which must be evaluated.
11182 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11183 COMPOUND_EXPR in the chain will contain the tree for the simplified
11184 form of the builtin function call. */
11187 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11189 if (!validate_arg (s1, POINTER_TYPE)
11190 || !validate_arg (s2, POINTER_TYPE))
11195 const char *p1, *p2;
11197 p2 = c_getstr (s2);
11201 p1 = c_getstr (s1);
11204 const char *r = strpbrk (p1, p2);
11208 return build_int_cst (TREE_TYPE (s1), 0);
11210 /* Return an offset into the constant string argument. */
11211 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11212 s1, size_int (r - p1));
11213 return fold_convert_loc (loc, type, tem);
11217 /* strpbrk(x, "") == NULL.
11218 Evaluate and ignore s1 in case it had side-effects. */
11219 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11222 return NULL_TREE; /* Really call strpbrk. */
11224 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11228 /* New argument list transforming strpbrk(s1, s2) to
11229 strchr(s1, s2[0]). */
11230 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11234 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11237 Return NULL_TREE if no simplification was possible, otherwise return the
11238 simplified form of the call as a tree.
11240 The simplified form may be a constant or other expression which
11241 computes the same value, but in a more efficient manner (including
11242 calls to other builtin functions).
11244 The call may contain arguments which need to be evaluated, but
11245 which are not useful to determine the result of the call. In
11246 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11247 COMPOUND_EXPR will be an argument which must be evaluated.
11248 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11249 COMPOUND_EXPR in the chain will contain the tree for the simplified
11250 form of the builtin function call. */
11253 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11255 if (!validate_arg (dst, POINTER_TYPE)
11256 || !validate_arg (src, POINTER_TYPE))
11260 const char *p = c_getstr (src);
11262 /* If the string length is zero, return the dst parameter. */
11263 if (p && *p == '\0')
11266 if (optimize_insn_for_speed_p ())
11268 /* See if we can store by pieces into (dst + strlen(dst)). */
11270 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11271 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11273 if (!strlen_fn || !strcpy_fn)
11276 /* If we don't have a movstr we don't want to emit an strcpy
11277 call. We have to do that if the length of the source string
11278 isn't computable (in that case we can use memcpy probably
11279 later expanding to a sequence of mov instructions). If we
11280 have movstr instructions we can emit strcpy calls. */
11283 tree len = c_strlen (src, 1);
11284 if (! len || TREE_SIDE_EFFECTS (len))
11288 /* Stabilize the argument list. */
11289 dst = builtin_save_expr (dst);
11291 /* Create strlen (dst). */
11292 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11293 /* Create (dst p+ strlen (dst)). */
11295 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11296 TREE_TYPE (dst), dst, newdst);
11297 newdst = builtin_save_expr (newdst);
11299 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11300 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11306 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11307 arguments to the call.
11309 Return NULL_TREE if no simplification was possible, otherwise return the
11310 simplified form of the call as a tree.
11312 The simplified form may be a constant or other expression which
11313 computes the same value, but in a more efficient manner (including
11314 calls to other builtin functions).
11316 The call may contain arguments which need to be evaluated, but
11317 which are not useful to determine the result of the call. In
11318 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11319 COMPOUND_EXPR will be an argument which must be evaluated.
11320 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11321 COMPOUND_EXPR in the chain will contain the tree for the simplified
11322 form of the builtin function call. */
11325 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11327 if (!validate_arg (dst, POINTER_TYPE)
11328 || !validate_arg (src, POINTER_TYPE)
11329 || !validate_arg (len, INTEGER_TYPE))
11333 const char *p = c_getstr (src);
11335 /* If the requested length is zero, or the src parameter string
11336 length is zero, return the dst parameter. */
11337 if (integer_zerop (len) || (p && *p == '\0'))
11338 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11340 /* If the requested len is greater than or equal to the string
11341 length, call strcat. */
11342 if (TREE_CODE (len) == INTEGER_CST && p
11343 && compare_tree_int (len, strlen (p)) >= 0)
11345 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11347 /* If the replacement _DECL isn't initialized, don't do the
11352 return build_call_expr_loc (loc, fn, 2, dst, src);
11358 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11361 Return NULL_TREE if no simplification was possible, otherwise return the
11362 simplified form of the call as a tree.
11364 The simplified form may be a constant or other expression which
11365 computes the same value, but in a more efficient manner (including
11366 calls to other builtin functions).
11368 The call may contain arguments which need to be evaluated, but
11369 which are not useful to determine the result of the call. In
11370 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11371 COMPOUND_EXPR will be an argument which must be evaluated.
11372 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11373 COMPOUND_EXPR in the chain will contain the tree for the simplified
11374 form of the builtin function call. */
11377 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11379 if (!validate_arg (s1, POINTER_TYPE)
11380 || !validate_arg (s2, POINTER_TYPE))
11384 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11386 /* If both arguments are constants, evaluate at compile-time. */
11389 const size_t r = strspn (p1, p2);
11390 return size_int (r);
11393 /* If either argument is "", return NULL_TREE. */
11394 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11395 /* Evaluate and ignore both arguments in case either one has
11397 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11403 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11406 Return NULL_TREE if no simplification was possible, otherwise return the
11407 simplified form of the call as a tree.
11409 The simplified form may be a constant or other expression which
11410 computes the same value, but in a more efficient manner (including
11411 calls to other builtin functions).
11413 The call may contain arguments which need to be evaluated, but
11414 which are not useful to determine the result of the call. In
11415 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11416 COMPOUND_EXPR will be an argument which must be evaluated.
11417 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11418 COMPOUND_EXPR in the chain will contain the tree for the simplified
11419 form of the builtin function call. */
11422 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11424 if (!validate_arg (s1, POINTER_TYPE)
11425 || !validate_arg (s2, POINTER_TYPE))
11429 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11431 /* If both arguments are constants, evaluate at compile-time. */
11434 const size_t r = strcspn (p1, p2);
11435 return size_int (r);
11438 /* If the first argument is "", return NULL_TREE. */
11439 if (p1 && *p1 == '\0')
11441 /* Evaluate and ignore argument s2 in case it has
11443 return omit_one_operand_loc (loc, size_type_node,
11444 size_zero_node, s2);
11447 /* If the second argument is "", return __builtin_strlen(s1). */
11448 if (p2 && *p2 == '\0')
11450 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11452 /* If the replacement _DECL isn't initialized, don't do the
11457 return build_call_expr_loc (loc, fn, 1, s1);
11463 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11464 to the call. IGNORE is true if the value returned
11465 by the builtin will be ignored. UNLOCKED is true is true if this
11466 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11467 the known length of the string. Return NULL_TREE if no simplification
11471 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11472 bool ignore, bool unlocked, tree len)
11474 /* If we're using an unlocked function, assume the other unlocked
11475 functions exist explicitly. */
11476 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11477 : implicit_built_in_decls[BUILT_IN_FPUTC];
11478 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11479 : implicit_built_in_decls[BUILT_IN_FWRITE];
11481 /* If the return value is used, don't do the transformation. */
11485 /* Verify the arguments in the original call. */
11486 if (!validate_arg (arg0, POINTER_TYPE)
11487 || !validate_arg (arg1, POINTER_TYPE))
11491 len = c_strlen (arg0, 0);
11493 /* Get the length of the string passed to fputs. If the length
11494 can't be determined, punt. */
11496 || TREE_CODE (len) != INTEGER_CST)
11499 switch (compare_tree_int (len, 1))
11501 case -1: /* length is 0, delete the call entirely . */
11502 return omit_one_operand_loc (loc, integer_type_node,
11503 integer_zero_node, arg1);;
11505 case 0: /* length is 1, call fputc. */
11507 const char *p = c_getstr (arg0);
11512 return build_call_expr_loc (loc, fn_fputc, 2,
11513 build_int_cst (NULL_TREE, p[0]), arg1);
11519 case 1: /* length is greater than 1, call fwrite. */
11521 /* If optimizing for size keep fputs. */
11522 if (optimize_function_for_size_p (cfun))
11524 /* New argument list transforming fputs(string, stream) to
11525 fwrite(string, 1, len, stream). */
11527 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11528 size_one_node, len, arg1);
11533 gcc_unreachable ();
11538 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11539 produced. False otherwise. This is done so that we don't output the error
11540 or warning twice or three times. */
11543 fold_builtin_next_arg (tree exp, bool va_start_p)
11545 tree fntype = TREE_TYPE (current_function_decl);
11546 int nargs = call_expr_nargs (exp);
11549 if (TYPE_ARG_TYPES (fntype) == 0
11550 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11551 == void_type_node))
11553 error ("%<va_start%> used in function with fixed args");
11559 if (va_start_p && (nargs != 2))
11561 error ("wrong number of arguments to function %<va_start%>");
11564 arg = CALL_EXPR_ARG (exp, 1);
11566 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11567 when we checked the arguments and if needed issued a warning. */
11572 /* Evidently an out of date version of <stdarg.h>; can't validate
11573 va_start's second argument, but can still work as intended. */
11574 warning (0, "%<__builtin_next_arg%> called without an argument");
11577 else if (nargs > 1)
11579 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11582 arg = CALL_EXPR_ARG (exp, 0);
11585 if (TREE_CODE (arg) == SSA_NAME)
11586 arg = SSA_NAME_VAR (arg);
11588 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11589 or __builtin_next_arg (0) the first time we see it, after checking
11590 the arguments and if needed issuing a warning. */
11591 if (!integer_zerop (arg))
11593 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11595 /* Strip off all nops for the sake of the comparison. This
11596 is not quite the same as STRIP_NOPS. It does more.
11597 We must also strip off INDIRECT_EXPR for C++ reference
11599 while (CONVERT_EXPR_P (arg)
11600 || TREE_CODE (arg) == INDIRECT_REF)
11601 arg = TREE_OPERAND (arg, 0);
11602 if (arg != last_parm)
11604 /* FIXME: Sometimes with the tree optimizers we can get the
11605 not the last argument even though the user used the last
11606 argument. We just warn and set the arg to be the last
11607 argument so that we will get wrong-code because of
11609 warning (0, "second parameter of %<va_start%> not last named argument");
11612 /* Undefined by C99 7.15.1.4p4 (va_start):
11613 "If the parameter parmN is declared with the register storage
11614 class, with a function or array type, or with a type that is
11615 not compatible with the type that results after application of
11616 the default argument promotions, the behavior is undefined."
11618 else if (DECL_REGISTER (arg))
11619 warning (0, "undefined behaviour when second parameter of "
11620 "%<va_start%> is declared with %<register%> storage");
11622 /* We want to verify the second parameter just once before the tree
11623 optimizers are run and then avoid keeping it in the tree,
11624 as otherwise we could warn even for correct code like:
11625 void foo (int i, ...)
11626 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11628 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11630 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11636 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11637 ORIG may be null if this is a 2-argument call. We don't attempt to
11638 simplify calls with more than 3 arguments.
11640 Return NULL_TREE if no simplification was possible, otherwise return the
11641 simplified form of the call as a tree. If IGNORED is true, it means that
11642 the caller does not use the returned value of the function. */
11645 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11646 tree orig, int ignored)
11649 const char *fmt_str = NULL;
11651 /* Verify the required arguments in the original call. We deal with two
11652 types of sprintf() calls: 'sprintf (str, fmt)' and
11653 'sprintf (dest, "%s", orig)'. */
11654 if (!validate_arg (dest, POINTER_TYPE)
11655 || !validate_arg (fmt, POINTER_TYPE))
11657 if (orig && !validate_arg (orig, POINTER_TYPE))
11660 /* Check whether the format is a literal string constant. */
11661 fmt_str = c_getstr (fmt);
11662 if (fmt_str == NULL)
11666 retval = NULL_TREE;
11668 if (!init_target_chars ())
11671 /* If the format doesn't contain % args or %%, use strcpy. */
11672 if (strchr (fmt_str, target_percent) == NULL)
11674 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11679 /* Don't optimize sprintf (buf, "abc", ptr++). */
11683 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11684 'format' is known to contain no % formats. */
11685 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11687 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11690 /* If the format is "%s", use strcpy if the result isn't used. */
11691 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11694 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11699 /* Don't crash on sprintf (str1, "%s"). */
11703 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11706 retval = c_strlen (orig, 1);
11707 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11710 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11713 if (call && retval)
11715 retval = fold_convert_loc
11716 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11718 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11724 /* Expand a call EXP to __builtin_object_size. */
11727 expand_builtin_object_size (tree exp)
11730 int object_size_type;
11731 tree fndecl = get_callee_fndecl (exp);
11733 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11735 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11737 expand_builtin_trap ();
11741 ost = CALL_EXPR_ARG (exp, 1);
11744 if (TREE_CODE (ost) != INTEGER_CST
11745 || tree_int_cst_sgn (ost) < 0
11746 || compare_tree_int (ost, 3) > 0)
11748 error ("%Klast argument of %D is not integer constant between 0 and 3",
11750 expand_builtin_trap ();
11754 object_size_type = tree_low_cst (ost, 0);
11756 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11759 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11760 FCODE is the BUILT_IN_* to use.
11761 Return NULL_RTX if we failed; the caller should emit a normal call,
11762 otherwise try to get the result in TARGET, if convenient (and in
11763 mode MODE if that's convenient). */
11766 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11767 enum built_in_function fcode)
11769 tree dest, src, len, size;
11771 if (!validate_arglist (exp,
11773 fcode == BUILT_IN_MEMSET_CHK
11774 ? INTEGER_TYPE : POINTER_TYPE,
11775 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11778 dest = CALL_EXPR_ARG (exp, 0);
11779 src = CALL_EXPR_ARG (exp, 1);
11780 len = CALL_EXPR_ARG (exp, 2);
11781 size = CALL_EXPR_ARG (exp, 3);
11783 if (! host_integerp (size, 1))
11786 if (host_integerp (len, 1) || integer_all_onesp (size))
11790 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11792 warning_at (tree_nonartificial_location (exp),
11793 0, "%Kcall to %D will always overflow destination buffer",
11794 exp, get_callee_fndecl (exp));
11799 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11800 mem{cpy,pcpy,move,set} is available. */
11803 case BUILT_IN_MEMCPY_CHK:
11804 fn = built_in_decls[BUILT_IN_MEMCPY];
11806 case BUILT_IN_MEMPCPY_CHK:
11807 fn = built_in_decls[BUILT_IN_MEMPCPY];
11809 case BUILT_IN_MEMMOVE_CHK:
11810 fn = built_in_decls[BUILT_IN_MEMMOVE];
11812 case BUILT_IN_MEMSET_CHK:
11813 fn = built_in_decls[BUILT_IN_MEMSET];
11822 fn = build_call_nofold (fn, 3, dest, src, len);
11823 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11824 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11825 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11827 else if (fcode == BUILT_IN_MEMSET_CHK)
11831 unsigned int dest_align
11832 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11834 /* If DEST is not a pointer type, call the normal function. */
11835 if (dest_align == 0)
11838 /* If SRC and DEST are the same (and not volatile), do nothing. */
11839 if (operand_equal_p (src, dest, 0))
11843 if (fcode != BUILT_IN_MEMPCPY_CHK)
11845 /* Evaluate and ignore LEN in case it has side-effects. */
11846 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11847 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11850 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11851 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11854 /* __memmove_chk special case. */
11855 if (fcode == BUILT_IN_MEMMOVE_CHK)
11857 unsigned int src_align
11858 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11860 if (src_align == 0)
11863 /* If src is categorized for a readonly section we can use
11864 normal __memcpy_chk. */
11865 if (readonly_data_expr (src))
11867 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11870 fn = build_call_nofold (fn, 4, dest, src, len, size);
11871 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11872 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11873 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11880 /* Emit warning if a buffer overflow is detected at compile time. */
11883 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11887 location_t loc = tree_nonartificial_location (exp);
11891 case BUILT_IN_STRCPY_CHK:
11892 case BUILT_IN_STPCPY_CHK:
11893 /* For __strcat_chk the warning will be emitted only if overflowing
11894 by at least strlen (dest) + 1 bytes. */
11895 case BUILT_IN_STRCAT_CHK:
11896 len = CALL_EXPR_ARG (exp, 1);
11897 size = CALL_EXPR_ARG (exp, 2);
11900 case BUILT_IN_STRNCAT_CHK:
11901 case BUILT_IN_STRNCPY_CHK:
11902 len = CALL_EXPR_ARG (exp, 2);
11903 size = CALL_EXPR_ARG (exp, 3);
11905 case BUILT_IN_SNPRINTF_CHK:
11906 case BUILT_IN_VSNPRINTF_CHK:
11907 len = CALL_EXPR_ARG (exp, 1);
11908 size = CALL_EXPR_ARG (exp, 3);
11911 gcc_unreachable ();
11917 if (! host_integerp (size, 1) || integer_all_onesp (size))
11922 len = c_strlen (len, 1);
11923 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11926 else if (fcode == BUILT_IN_STRNCAT_CHK)
11928 tree src = CALL_EXPR_ARG (exp, 1);
11929 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11931 src = c_strlen (src, 1);
11932 if (! src || ! host_integerp (src, 1))
11934 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11935 exp, get_callee_fndecl (exp));
11938 else if (tree_int_cst_lt (src, size))
11941 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11944 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11945 exp, get_callee_fndecl (exp));
11948 /* Emit warning if a buffer overflow is detected at compile time
11949 in __sprintf_chk/__vsprintf_chk calls. */
11952 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11954 tree size, len, fmt;
11955 const char *fmt_str;
11956 int nargs = call_expr_nargs (exp);
11958 /* Verify the required arguments in the original call. */
11962 size = CALL_EXPR_ARG (exp, 2);
11963 fmt = CALL_EXPR_ARG (exp, 3);
11965 if (! host_integerp (size, 1) || integer_all_onesp (size))
11968 /* Check whether the format is a literal string constant. */
11969 fmt_str = c_getstr (fmt);
11970 if (fmt_str == NULL)
11973 if (!init_target_chars ())
11976 /* If the format doesn't contain % args or %%, we know its size. */
11977 if (strchr (fmt_str, target_percent) == 0)
11978 len = build_int_cstu (size_type_node, strlen (fmt_str));
11979 /* If the format is "%s" and first ... argument is a string literal,
11981 else if (fcode == BUILT_IN_SPRINTF_CHK
11982 && strcmp (fmt_str, target_percent_s) == 0)
11988 arg = CALL_EXPR_ARG (exp, 4);
11989 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11992 len = c_strlen (arg, 1);
11993 if (!len || ! host_integerp (len, 1))
11999 if (! tree_int_cst_lt (len, size))
12000 warning_at (tree_nonartificial_location (exp),
12001 0, "%Kcall to %D will always overflow destination buffer",
12002 exp, get_callee_fndecl (exp));
12005 /* Emit warning if a free is called with address of a variable. */
12008 maybe_emit_free_warning (tree exp)
12010 tree arg = CALL_EXPR_ARG (exp, 0);
12013 if (TREE_CODE (arg) != ADDR_EXPR)
12016 arg = get_base_address (TREE_OPERAND (arg, 0));
12017 if (arg == NULL || INDIRECT_REF_P (arg))
12020 if (SSA_VAR_P (arg))
12021 warning_at (tree_nonartificial_location (exp),
12022 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12024 warning_at (tree_nonartificial_location (exp),
12025 0, "%Kattempt to free a non-heap object", exp);
12028 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12032 fold_builtin_object_size (tree ptr, tree ost)
12034 tree ret = NULL_TREE;
12035 int object_size_type;
12037 if (!validate_arg (ptr, POINTER_TYPE)
12038 || !validate_arg (ost, INTEGER_TYPE))
12043 if (TREE_CODE (ost) != INTEGER_CST
12044 || tree_int_cst_sgn (ost) < 0
12045 || compare_tree_int (ost, 3) > 0)
12048 object_size_type = tree_low_cst (ost, 0);
12050 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12051 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12052 and (size_t) 0 for types 2 and 3. */
12053 if (TREE_SIDE_EFFECTS (ptr))
12054 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12056 if (TREE_CODE (ptr) == ADDR_EXPR)
12057 ret = build_int_cstu (size_type_node,
12058 compute_builtin_object_size (ptr, object_size_type));
12060 else if (TREE_CODE (ptr) == SSA_NAME)
12062 unsigned HOST_WIDE_INT bytes;
12064 /* If object size is not known yet, delay folding until
12065 later. Maybe subsequent passes will help determining
12067 bytes = compute_builtin_object_size (ptr, object_size_type);
12068 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12070 ret = build_int_cstu (size_type_node, bytes);
12075 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12076 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12077 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12084 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12085 DEST, SRC, LEN, and SIZE are the arguments to the call.
12086 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12087 code of the builtin. If MAXLEN is not NULL, it is maximum length
12088 passed as third argument. */
12091 fold_builtin_memory_chk (location_t loc, tree fndecl,
12092 tree dest, tree src, tree len, tree size,
12093 tree maxlen, bool ignore,
12094 enum built_in_function fcode)
12098 if (!validate_arg (dest, POINTER_TYPE)
12099 || !validate_arg (src,
12100 (fcode == BUILT_IN_MEMSET_CHK
12101 ? INTEGER_TYPE : POINTER_TYPE))
12102 || !validate_arg (len, INTEGER_TYPE)
12103 || !validate_arg (size, INTEGER_TYPE))
12106 /* If SRC and DEST are the same (and not volatile), return DEST
12107 (resp. DEST+LEN for __mempcpy_chk). */
12108 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12110 if (fcode != BUILT_IN_MEMPCPY_CHK)
12111 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12115 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12117 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12121 if (! host_integerp (size, 1))
12124 if (! integer_all_onesp (size))
12126 if (! host_integerp (len, 1))
12128 /* If LEN is not constant, try MAXLEN too.
12129 For MAXLEN only allow optimizing into non-_ocs function
12130 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12131 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12133 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12135 /* (void) __mempcpy_chk () can be optimized into
12136 (void) __memcpy_chk (). */
12137 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12141 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12149 if (tree_int_cst_lt (size, maxlen))
12154 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12155 mem{cpy,pcpy,move,set} is available. */
12158 case BUILT_IN_MEMCPY_CHK:
12159 fn = built_in_decls[BUILT_IN_MEMCPY];
12161 case BUILT_IN_MEMPCPY_CHK:
12162 fn = built_in_decls[BUILT_IN_MEMPCPY];
12164 case BUILT_IN_MEMMOVE_CHK:
12165 fn = built_in_decls[BUILT_IN_MEMMOVE];
12167 case BUILT_IN_MEMSET_CHK:
12168 fn = built_in_decls[BUILT_IN_MEMSET];
12177 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12180 /* Fold a call to the __st[rp]cpy_chk builtin.
12181 DEST, SRC, and SIZE are the arguments to the call.
12182 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12183 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12184 strings passed as second argument. */
12187 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12188 tree src, tree size,
12189 tree maxlen, bool ignore,
12190 enum built_in_function fcode)
12194 if (!validate_arg (dest, POINTER_TYPE)
12195 || !validate_arg (src, POINTER_TYPE)
12196 || !validate_arg (size, INTEGER_TYPE))
12199 /* If SRC and DEST are the same (and not volatile), return DEST. */
12200 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12201 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12203 if (! host_integerp (size, 1))
12206 if (! integer_all_onesp (size))
12208 len = c_strlen (src, 1);
12209 if (! len || ! host_integerp (len, 1))
12211 /* If LEN is not constant, try MAXLEN too.
12212 For MAXLEN only allow optimizing into non-_ocs function
12213 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12214 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12216 if (fcode == BUILT_IN_STPCPY_CHK)
12221 /* If return value of __stpcpy_chk is ignored,
12222 optimize into __strcpy_chk. */
12223 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12227 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12230 if (! len || TREE_SIDE_EFFECTS (len))
12233 /* If c_strlen returned something, but not a constant,
12234 transform __strcpy_chk into __memcpy_chk. */
12235 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12239 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12240 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12241 build_call_expr_loc (loc, fn, 4,
12242 dest, src, len, size));
12248 if (! tree_int_cst_lt (maxlen, size))
12252 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12253 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12254 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12258 return build_call_expr_loc (loc, fn, 2, dest, src);
12261 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12262 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12263 length passed as third argument. */
12266 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12267 tree len, tree size, tree maxlen)
12271 if (!validate_arg (dest, POINTER_TYPE)
12272 || !validate_arg (src, POINTER_TYPE)
12273 || !validate_arg (len, INTEGER_TYPE)
12274 || !validate_arg (size, INTEGER_TYPE))
12277 if (! host_integerp (size, 1))
12280 if (! integer_all_onesp (size))
12282 if (! host_integerp (len, 1))
12284 /* If LEN is not constant, try MAXLEN too.
12285 For MAXLEN only allow optimizing into non-_ocs function
12286 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12287 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12293 if (tree_int_cst_lt (size, maxlen))
12297 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12298 fn = built_in_decls[BUILT_IN_STRNCPY];
12302 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12305 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12306 are the arguments to the call. */
12309 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12310 tree src, tree size)
12315 if (!validate_arg (dest, POINTER_TYPE)
12316 || !validate_arg (src, POINTER_TYPE)
12317 || !validate_arg (size, INTEGER_TYPE))
12320 p = c_getstr (src);
12321 /* If the SRC parameter is "", return DEST. */
12322 if (p && *p == '\0')
12323 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12325 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12328 /* If __builtin_strcat_chk is used, assume strcat is available. */
12329 fn = built_in_decls[BUILT_IN_STRCAT];
12333 return build_call_expr_loc (loc, fn, 2, dest, src);
12336 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12340 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12341 tree dest, tree src, tree len, tree size)
12346 if (!validate_arg (dest, POINTER_TYPE)
12347 || !validate_arg (src, POINTER_TYPE)
12348 || !validate_arg (size, INTEGER_TYPE)
12349 || !validate_arg (size, INTEGER_TYPE))
12352 p = c_getstr (src);
12353 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12354 if (p && *p == '\0')
12355 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12356 else if (integer_zerop (len))
12357 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12359 if (! host_integerp (size, 1))
12362 if (! integer_all_onesp (size))
12364 tree src_len = c_strlen (src, 1);
12366 && host_integerp (src_len, 1)
12367 && host_integerp (len, 1)
12368 && ! tree_int_cst_lt (len, src_len))
12370 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12371 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12375 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12380 /* If __builtin_strncat_chk is used, assume strncat is available. */
12381 fn = built_in_decls[BUILT_IN_STRNCAT];
12385 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12388 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12389 a normal call should be emitted rather than expanding the function
12390 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12393 fold_builtin_sprintf_chk (location_t loc, tree exp,
12394 enum built_in_function fcode)
12396 tree dest, size, len, fn, fmt, flag;
12397 const char *fmt_str;
12398 int nargs = call_expr_nargs (exp);
12400 /* Verify the required arguments in the original call. */
12403 dest = CALL_EXPR_ARG (exp, 0);
12404 if (!validate_arg (dest, POINTER_TYPE))
12406 flag = CALL_EXPR_ARG (exp, 1);
12407 if (!validate_arg (flag, INTEGER_TYPE))
12409 size = CALL_EXPR_ARG (exp, 2);
12410 if (!validate_arg (size, INTEGER_TYPE))
12412 fmt = CALL_EXPR_ARG (exp, 3);
12413 if (!validate_arg (fmt, POINTER_TYPE))
12416 if (! host_integerp (size, 1))
12421 if (!init_target_chars ())
12424 /* Check whether the format is a literal string constant. */
12425 fmt_str = c_getstr (fmt);
12426 if (fmt_str != NULL)
12428 /* If the format doesn't contain % args or %%, we know the size. */
12429 if (strchr (fmt_str, target_percent) == 0)
12431 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12432 len = build_int_cstu (size_type_node, strlen (fmt_str));
12434 /* If the format is "%s" and first ... argument is a string literal,
12435 we know the size too. */
12436 else if (fcode == BUILT_IN_SPRINTF_CHK
12437 && strcmp (fmt_str, target_percent_s) == 0)
12443 arg = CALL_EXPR_ARG (exp, 4);
12444 if (validate_arg (arg, POINTER_TYPE))
12446 len = c_strlen (arg, 1);
12447 if (! len || ! host_integerp (len, 1))
12454 if (! integer_all_onesp (size))
12456 if (! len || ! tree_int_cst_lt (len, size))
12460 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12461 or if format doesn't contain % chars or is "%s". */
12462 if (! integer_zerop (flag))
12464 if (fmt_str == NULL)
12466 if (strchr (fmt_str, target_percent) != NULL
12467 && strcmp (fmt_str, target_percent_s))
12471 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12472 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12473 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12477 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12480 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12481 a normal call should be emitted rather than expanding the function
12482 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12483 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12484 passed as second argument. */
12487 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12488 enum built_in_function fcode)
12490 tree dest, size, len, fn, fmt, flag;
12491 const char *fmt_str;
12493 /* Verify the required arguments in the original call. */
12494 if (call_expr_nargs (exp) < 5)
12496 dest = CALL_EXPR_ARG (exp, 0);
12497 if (!validate_arg (dest, POINTER_TYPE))
12499 len = CALL_EXPR_ARG (exp, 1);
12500 if (!validate_arg (len, INTEGER_TYPE))
12502 flag = CALL_EXPR_ARG (exp, 2);
12503 if (!validate_arg (flag, INTEGER_TYPE))
12505 size = CALL_EXPR_ARG (exp, 3);
12506 if (!validate_arg (size, INTEGER_TYPE))
12508 fmt = CALL_EXPR_ARG (exp, 4);
12509 if (!validate_arg (fmt, POINTER_TYPE))
12512 if (! host_integerp (size, 1))
12515 if (! integer_all_onesp (size))
12517 if (! host_integerp (len, 1))
12519 /* If LEN is not constant, try MAXLEN too.
12520 For MAXLEN only allow optimizing into non-_ocs function
12521 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12522 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12528 if (tree_int_cst_lt (size, maxlen))
12532 if (!init_target_chars ())
12535 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12536 or if format doesn't contain % chars or is "%s". */
12537 if (! integer_zerop (flag))
12539 fmt_str = c_getstr (fmt);
12540 if (fmt_str == NULL)
12542 if (strchr (fmt_str, target_percent) != NULL
12543 && strcmp (fmt_str, target_percent_s))
12547 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12549 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12550 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12554 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12557 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12558 FMT and ARG are the arguments to the call; we don't fold cases with
12559 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12561 Return NULL_TREE if no simplification was possible, otherwise return the
12562 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12563 code of the function to be simplified. */
12566 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12567 tree arg, bool ignore,
12568 enum built_in_function fcode)
12570 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12571 const char *fmt_str = NULL;
12573 /* If the return value is used, don't do the transformation. */
12577 /* Verify the required arguments in the original call. */
12578 if (!validate_arg (fmt, POINTER_TYPE))
12581 /* Check whether the format is a literal string constant. */
12582 fmt_str = c_getstr (fmt);
12583 if (fmt_str == NULL)
12586 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12588 /* If we're using an unlocked function, assume the other
12589 unlocked functions exist explicitly. */
12590 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12591 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12595 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12596 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12599 if (!init_target_chars ())
12602 if (strcmp (fmt_str, target_percent_s) == 0
12603 || strchr (fmt_str, target_percent) == NULL)
12607 if (strcmp (fmt_str, target_percent_s) == 0)
12609 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12612 if (!arg || !validate_arg (arg, POINTER_TYPE))
12615 str = c_getstr (arg);
12621 /* The format specifier doesn't contain any '%' characters. */
12622 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12628 /* If the string was "", printf does nothing. */
12629 if (str[0] == '\0')
12630 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12632 /* If the string has length of 1, call putchar. */
12633 if (str[1] == '\0')
12635 /* Given printf("c"), (where c is any one character,)
12636 convert "c"[0] to an int and pass that to the replacement
12638 newarg = build_int_cst (NULL_TREE, str[0]);
12640 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12644 /* If the string was "string\n", call puts("string"). */
12645 size_t len = strlen (str);
12646 if ((unsigned char)str[len - 1] == target_newline)
12648 /* Create a NUL-terminated string that's one char shorter
12649 than the original, stripping off the trailing '\n'. */
12650 char *newstr = XALLOCAVEC (char, len);
12651 memcpy (newstr, str, len - 1);
12652 newstr[len - 1] = 0;
12654 newarg = build_string_literal (len, newstr);
12656 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12659 /* We'd like to arrange to call fputs(string,stdout) here,
12660 but we need stdout and don't have a way to get it yet. */
12665 /* The other optimizations can be done only on the non-va_list variants. */
12666 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12669 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12670 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12672 if (!arg || !validate_arg (arg, POINTER_TYPE))
12675 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12678 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12679 else if (strcmp (fmt_str, target_percent_c) == 0)
12681 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12684 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12690 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12693 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12694 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12695 more than 3 arguments, and ARG may be null in the 2-argument case.
12697 Return NULL_TREE if no simplification was possible, otherwise return the
12698 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12699 code of the function to be simplified. */
12702 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12703 tree fmt, tree arg, bool ignore,
12704 enum built_in_function fcode)
12706 tree fn_fputc, fn_fputs, call = NULL_TREE;
12707 const char *fmt_str = NULL;
12709 /* If the return value is used, don't do the transformation. */
12713 /* Verify the required arguments in the original call. */
12714 if (!validate_arg (fp, POINTER_TYPE))
12716 if (!validate_arg (fmt, POINTER_TYPE))
12719 /* Check whether the format is a literal string constant. */
12720 fmt_str = c_getstr (fmt);
12721 if (fmt_str == NULL)
12724 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12726 /* If we're using an unlocked function, assume the other
12727 unlocked functions exist explicitly. */
12728 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12729 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12733 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12734 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12737 if (!init_target_chars ())
12740 /* If the format doesn't contain % args or %%, use strcpy. */
12741 if (strchr (fmt_str, target_percent) == NULL)
12743 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12747 /* If the format specifier was "", fprintf does nothing. */
12748 if (fmt_str[0] == '\0')
12750 /* If FP has side-effects, just wait until gimplification is
12752 if (TREE_SIDE_EFFECTS (fp))
12755 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12758 /* When "string" doesn't contain %, replace all cases of
12759 fprintf (fp, string) with fputs (string, fp). The fputs
12760 builtin will take care of special cases like length == 1. */
12762 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12765 /* The other optimizations can be done only on the non-va_list variants. */
12766 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12769 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12770 else if (strcmp (fmt_str, target_percent_s) == 0)
12772 if (!arg || !validate_arg (arg, POINTER_TYPE))
12775 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12778 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12779 else if (strcmp (fmt_str, target_percent_c) == 0)
12781 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12784 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12789 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12792 /* Initialize format string characters in the target charset. */
12795 init_target_chars (void)
12800 target_newline = lang_hooks.to_target_charset ('\n');
12801 target_percent = lang_hooks.to_target_charset ('%');
12802 target_c = lang_hooks.to_target_charset ('c');
12803 target_s = lang_hooks.to_target_charset ('s');
12804 if (target_newline == 0 || target_percent == 0 || target_c == 0
12808 target_percent_c[0] = target_percent;
12809 target_percent_c[1] = target_c;
12810 target_percent_c[2] = '\0';
12812 target_percent_s[0] = target_percent;
12813 target_percent_s[1] = target_s;
12814 target_percent_s[2] = '\0';
12816 target_percent_s_newline[0] = target_percent;
12817 target_percent_s_newline[1] = target_s;
12818 target_percent_s_newline[2] = target_newline;
12819 target_percent_s_newline[3] = '\0';
12826 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12827 and no overflow/underflow occurred. INEXACT is true if M was not
12828 exactly calculated. TYPE is the tree type for the result. This
12829 function assumes that you cleared the MPFR flags and then
12830 calculated M to see if anything subsequently set a flag prior to
12831 entering this function. Return NULL_TREE if any checks fail. */
12834 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12836 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12837 overflow/underflow occurred. If -frounding-math, proceed iff the
12838 result of calling FUNC was exact. */
12839 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12840 && (!flag_rounding_math || !inexact))
12842 REAL_VALUE_TYPE rr;
12844 real_from_mpfr (&rr, m, type, GMP_RNDN);
12845 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12846 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12847 but the mpft_t is not, then we underflowed in the
12849 if (real_isfinite (&rr)
12850 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12852 REAL_VALUE_TYPE rmode;
12854 real_convert (&rmode, TYPE_MODE (type), &rr);
12855 /* Proceed iff the specified mode can hold the value. */
12856 if (real_identical (&rmode, &rr))
12857 return build_real (type, rmode);
12863 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12864 number and no overflow/underflow occurred. INEXACT is true if M
12865 was not exactly calculated. TYPE is the tree type for the result.
12866 This function assumes that you cleared the MPFR flags and then
12867 calculated M to see if anything subsequently set a flag prior to
12868 entering this function. Return NULL_TREE if any checks fail, if
12869 FORCE_CONVERT is true, then bypass the checks. */
12872 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12874 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12875 overflow/underflow occurred. If -frounding-math, proceed iff the
12876 result of calling FUNC was exact. */
12878 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12879 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12880 && (!flag_rounding_math || !inexact)))
12882 REAL_VALUE_TYPE re, im;
12884 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12885 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12886 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12887 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12888 but the mpft_t is not, then we underflowed in the
12891 || (real_isfinite (&re) && real_isfinite (&im)
12892 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12893 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12895 REAL_VALUE_TYPE re_mode, im_mode;
12897 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12898 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12899 /* Proceed iff the specified mode can hold the value. */
12901 || (real_identical (&re_mode, &re)
12902 && real_identical (&im_mode, &im)))
12903 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12904 build_real (TREE_TYPE (type), im_mode));
12910 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12911 FUNC on it and return the resulting value as a tree with type TYPE.
12912 If MIN and/or MAX are not NULL, then the supplied ARG must be
12913 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12914 acceptable values, otherwise they are not. The mpfr precision is
12915 set to the precision of TYPE. We assume that function FUNC returns
12916 zero if the result could be calculated exactly within the requested
12920 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12921 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12924 tree result = NULL_TREE;
12928 /* To proceed, MPFR must exactly represent the target floating point
12929 format, which only happens when the target base equals two. */
12930 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12931 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12933 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12935 if (real_isfinite (ra)
12936 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12937 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12939 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12940 const int prec = fmt->p;
12941 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12945 mpfr_init2 (m, prec);
12946 mpfr_from_real (m, ra, GMP_RNDN);
12947 mpfr_clear_flags ();
12948 inexact = func (m, m, rnd);
12949 result = do_mpfr_ckconv (m, type, inexact);
12957 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12958 FUNC on it and return the resulting value as a tree with type TYPE.
12959 The mpfr precision is set to the precision of TYPE. We assume that
12960 function FUNC returns zero if the result could be calculated
12961 exactly within the requested precision. */
12964 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12965 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12967 tree result = NULL_TREE;
12972 /* To proceed, MPFR must exactly represent the target floating point
12973 format, which only happens when the target base equals two. */
12974 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12975 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12976 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12978 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12979 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12981 if (real_isfinite (ra1) && real_isfinite (ra2))
12983 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12984 const int prec = fmt->p;
12985 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12989 mpfr_inits2 (prec, m1, m2, NULL);
12990 mpfr_from_real (m1, ra1, GMP_RNDN);
12991 mpfr_from_real (m2, ra2, GMP_RNDN);
12992 mpfr_clear_flags ();
12993 inexact = func (m1, m1, m2, rnd);
12994 result = do_mpfr_ckconv (m1, type, inexact);
12995 mpfr_clears (m1, m2, NULL);
13002 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13003 FUNC on it and return the resulting value as a tree with type TYPE.
13004 The mpfr precision is set to the precision of TYPE. We assume that
13005 function FUNC returns zero if the result could be calculated
13006 exactly within the requested precision. */
13009 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13010 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13012 tree result = NULL_TREE;
13018 /* To proceed, MPFR must exactly represent the target floating point
13019 format, which only happens when the target base equals two. */
13020 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13021 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13022 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13023 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13025 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13026 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13027 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13029 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13031 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13032 const int prec = fmt->p;
13033 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13037 mpfr_inits2 (prec, m1, m2, m3, NULL);
13038 mpfr_from_real (m1, ra1, GMP_RNDN);
13039 mpfr_from_real (m2, ra2, GMP_RNDN);
13040 mpfr_from_real (m3, ra3, GMP_RNDN);
13041 mpfr_clear_flags ();
13042 inexact = func (m1, m1, m2, m3, rnd);
13043 result = do_mpfr_ckconv (m1, type, inexact);
13044 mpfr_clears (m1, m2, m3, NULL);
13051 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13052 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13053 If ARG_SINP and ARG_COSP are NULL then the result is returned
13054 as a complex value.
13055 The type is taken from the type of ARG and is used for setting the
13056 precision of the calculation and results. */
13059 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13061 tree const type = TREE_TYPE (arg);
13062 tree result = NULL_TREE;
13066 /* To proceed, MPFR must exactly represent the target floating point
13067 format, which only happens when the target base equals two. */
13068 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13069 && TREE_CODE (arg) == REAL_CST
13070 && !TREE_OVERFLOW (arg))
13072 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13074 if (real_isfinite (ra))
13076 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13077 const int prec = fmt->p;
13078 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13079 tree result_s, result_c;
13083 mpfr_inits2 (prec, m, ms, mc, NULL);
13084 mpfr_from_real (m, ra, GMP_RNDN);
13085 mpfr_clear_flags ();
13086 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13087 result_s = do_mpfr_ckconv (ms, type, inexact);
13088 result_c = do_mpfr_ckconv (mc, type, inexact);
13089 mpfr_clears (m, ms, mc, NULL);
13090 if (result_s && result_c)
13092 /* If we are to return in a complex value do so. */
13093 if (!arg_sinp && !arg_cosp)
13094 return build_complex (build_complex_type (type),
13095 result_c, result_s);
13097 /* Dereference the sin/cos pointer arguments. */
13098 arg_sinp = build_fold_indirect_ref (arg_sinp);
13099 arg_cosp = build_fold_indirect_ref (arg_cosp);
13100 /* Proceed if valid pointer type were passed in. */
13101 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13102 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13104 /* Set the values. */
13105 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13107 TREE_SIDE_EFFECTS (result_s) = 1;
13108 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13110 TREE_SIDE_EFFECTS (result_c) = 1;
13111 /* Combine the assignments into a compound expr. */
13112 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13113 result_s, result_c));
13121 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13122 two-argument mpfr order N Bessel function FUNC on them and return
13123 the resulting value as a tree with type TYPE. The mpfr precision
13124 is set to the precision of TYPE. We assume that function FUNC
13125 returns zero if the result could be calculated exactly within the
13126 requested precision. */
13128 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13129 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13130 const REAL_VALUE_TYPE *min, bool inclusive)
13132 tree result = NULL_TREE;
13137 /* To proceed, MPFR must exactly represent the target floating point
13138 format, which only happens when the target base equals two. */
13139 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13140 && host_integerp (arg1, 0)
13141 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13143 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13144 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13147 && real_isfinite (ra)
13148 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13150 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13151 const int prec = fmt->p;
13152 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13156 mpfr_init2 (m, prec);
13157 mpfr_from_real (m, ra, GMP_RNDN);
13158 mpfr_clear_flags ();
13159 inexact = func (m, n, m, rnd);
13160 result = do_mpfr_ckconv (m, type, inexact);
13168 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13169 the pointer *(ARG_QUO) and return the result. The type is taken
13170 from the type of ARG0 and is used for setting the precision of the
13171 calculation and results. */
13174 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13176 tree const type = TREE_TYPE (arg0);
13177 tree result = NULL_TREE;
13182 /* To proceed, MPFR must exactly represent the target floating point
13183 format, which only happens when the target base equals two. */
13184 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13185 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13186 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13188 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13189 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13191 if (real_isfinite (ra0) && real_isfinite (ra1))
13193 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13194 const int prec = fmt->p;
13195 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13200 mpfr_inits2 (prec, m0, m1, NULL);
13201 mpfr_from_real (m0, ra0, GMP_RNDN);
13202 mpfr_from_real (m1, ra1, GMP_RNDN);
13203 mpfr_clear_flags ();
13204 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13205 /* Remquo is independent of the rounding mode, so pass
13206 inexact=0 to do_mpfr_ckconv(). */
13207 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13208 mpfr_clears (m0, m1, NULL);
13211 /* MPFR calculates quo in the host's long so it may
13212 return more bits in quo than the target int can hold
13213 if sizeof(host long) > sizeof(target int). This can
13214 happen even for native compilers in LP64 mode. In
13215 these cases, modulo the quo value with the largest
13216 number that the target int can hold while leaving one
13217 bit for the sign. */
13218 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13219 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13221 /* Dereference the quo pointer argument. */
13222 arg_quo = build_fold_indirect_ref (arg_quo);
13223 /* Proceed iff a valid pointer type was passed in. */
13224 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13226 /* Set the value. */
13227 tree result_quo = fold_build2 (MODIFY_EXPR,
13228 TREE_TYPE (arg_quo), arg_quo,
13229 build_int_cst (NULL, integer_quo));
13230 TREE_SIDE_EFFECTS (result_quo) = 1;
13231 /* Combine the quo assignment with the rem. */
13232 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13233 result_quo, result_rem));
13241 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13242 resulting value as a tree with type TYPE. The mpfr precision is
13243 set to the precision of TYPE. We assume that this mpfr function
13244 returns zero if the result could be calculated exactly within the
13245 requested precision. In addition, the integer pointer represented
13246 by ARG_SG will be dereferenced and set to the appropriate signgam
13250 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13252 tree result = NULL_TREE;
13256 /* To proceed, MPFR must exactly represent the target floating point
13257 format, which only happens when the target base equals two. Also
13258 verify ARG is a constant and that ARG_SG is an int pointer. */
13259 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13260 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13261 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13262 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13264 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13266 /* In addition to NaN and Inf, the argument cannot be zero or a
13267 negative integer. */
13268 if (real_isfinite (ra)
13269 && ra->cl != rvc_zero
13270 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13272 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13273 const int prec = fmt->p;
13274 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13279 mpfr_init2 (m, prec);
13280 mpfr_from_real (m, ra, GMP_RNDN);
13281 mpfr_clear_flags ();
13282 inexact = mpfr_lgamma (m, &sg, m, rnd);
13283 result_lg = do_mpfr_ckconv (m, type, inexact);
13289 /* Dereference the arg_sg pointer argument. */
13290 arg_sg = build_fold_indirect_ref (arg_sg);
13291 /* Assign the signgam value into *arg_sg. */
13292 result_sg = fold_build2 (MODIFY_EXPR,
13293 TREE_TYPE (arg_sg), arg_sg,
13294 build_int_cst (NULL, sg));
13295 TREE_SIDE_EFFECTS (result_sg) = 1;
13296 /* Combine the signgam assignment with the lgamma result. */
13297 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13298 result_sg, result_lg));
13306 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13307 function FUNC on it and return the resulting value as a tree with
13308 type TYPE. The mpfr precision is set to the precision of TYPE. We
13309 assume that function FUNC returns zero if the result could be
13310 calculated exactly within the requested precision. */
13313 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13315 tree result = NULL_TREE;
13319 /* To proceed, MPFR must exactly represent the target floating point
13320 format, which only happens when the target base equals two. */
13321 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13322 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13323 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13325 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13326 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13328 if (real_isfinite (re) && real_isfinite (im))
13330 const struct real_format *const fmt =
13331 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13332 const int prec = fmt->p;
13333 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13334 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13338 mpc_init2 (m, prec);
13339 mpfr_from_real (mpc_realref(m), re, rnd);
13340 mpfr_from_real (mpc_imagref(m), im, rnd);
13341 mpfr_clear_flags ();
13342 inexact = func (m, m, crnd);
13343 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13351 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13352 mpc function FUNC on it and return the resulting value as a tree
13353 with type TYPE. The mpfr precision is set to the precision of
13354 TYPE. We assume that function FUNC returns zero if the result
13355 could be calculated exactly within the requested precision. If
13356 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13357 in the arguments and/or results. */
13360 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13361 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13363 tree result = NULL_TREE;
13368 /* To proceed, MPFR must exactly represent the target floating point
13369 format, which only happens when the target base equals two. */
13370 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13371 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13372 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13373 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13374 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13376 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13377 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13378 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13379 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13382 || (real_isfinite (re0) && real_isfinite (im0)
13383 && real_isfinite (re1) && real_isfinite (im1)))
13385 const struct real_format *const fmt =
13386 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13387 const int prec = fmt->p;
13388 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13389 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13393 mpc_init2 (m0, prec);
13394 mpc_init2 (m1, prec);
13395 mpfr_from_real (mpc_realref(m0), re0, rnd);
13396 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13397 mpfr_from_real (mpc_realref(m1), re1, rnd);
13398 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13399 mpfr_clear_flags ();
13400 inexact = func (m0, m0, m1, crnd);
13401 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13411 The functions below provide an alternate interface for folding
13412 builtin function calls presented as GIMPLE_CALL statements rather
13413 than as CALL_EXPRs. The folded result is still expressed as a
13414 tree. There is too much code duplication in the handling of
13415 varargs functions, and a more intrusive re-factoring would permit
13416 better sharing of code between the tree and statement-based
13417 versions of these functions. */
13419 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13420 along with N new arguments specified as the "..." parameters. SKIP
13421 is the number of arguments in STMT to be omitted. This function is used
13422 to do varargs-to-varargs transformations. */
13425 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13427 int oldnargs = gimple_call_num_args (stmt);
13428 int nargs = oldnargs - skip + n;
13429 tree fntype = TREE_TYPE (fndecl);
13430 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13434 location_t loc = gimple_location (stmt);
13436 buffer = XALLOCAVEC (tree, nargs);
13438 for (i = 0; i < n; i++)
13439 buffer[i] = va_arg (ap, tree);
13441 for (j = skip; j < oldnargs; j++, i++)
13442 buffer[i] = gimple_call_arg (stmt, j);
13444 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13447 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13448 a normal call should be emitted rather than expanding the function
13449 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13452 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13454 tree dest, size, len, fn, fmt, flag;
13455 const char *fmt_str;
13456 int nargs = gimple_call_num_args (stmt);
13458 /* Verify the required arguments in the original call. */
13461 dest = gimple_call_arg (stmt, 0);
13462 if (!validate_arg (dest, POINTER_TYPE))
13464 flag = gimple_call_arg (stmt, 1);
13465 if (!validate_arg (flag, INTEGER_TYPE))
13467 size = gimple_call_arg (stmt, 2);
13468 if (!validate_arg (size, INTEGER_TYPE))
13470 fmt = gimple_call_arg (stmt, 3);
13471 if (!validate_arg (fmt, POINTER_TYPE))
13474 if (! host_integerp (size, 1))
13479 if (!init_target_chars ())
13482 /* Check whether the format is a literal string constant. */
13483 fmt_str = c_getstr (fmt);
13484 if (fmt_str != NULL)
13486 /* If the format doesn't contain % args or %%, we know the size. */
13487 if (strchr (fmt_str, target_percent) == 0)
13489 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13490 len = build_int_cstu (size_type_node, strlen (fmt_str));
13492 /* If the format is "%s" and first ... argument is a string literal,
13493 we know the size too. */
13494 else if (fcode == BUILT_IN_SPRINTF_CHK
13495 && strcmp (fmt_str, target_percent_s) == 0)
13501 arg = gimple_call_arg (stmt, 4);
13502 if (validate_arg (arg, POINTER_TYPE))
13504 len = c_strlen (arg, 1);
13505 if (! len || ! host_integerp (len, 1))
13512 if (! integer_all_onesp (size))
13514 if (! len || ! tree_int_cst_lt (len, size))
13518 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13519 or if format doesn't contain % chars or is "%s". */
13520 if (! integer_zerop (flag))
13522 if (fmt_str == NULL)
13524 if (strchr (fmt_str, target_percent) != NULL
13525 && strcmp (fmt_str, target_percent_s))
13529 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13530 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13531 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13535 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13538 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13539 a normal call should be emitted rather than expanding the function
13540 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13541 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13542 passed as second argument. */
13545 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13546 enum built_in_function fcode)
13548 tree dest, size, len, fn, fmt, flag;
13549 const char *fmt_str;
13551 /* Verify the required arguments in the original call. */
13552 if (gimple_call_num_args (stmt) < 5)
13554 dest = gimple_call_arg (stmt, 0);
13555 if (!validate_arg (dest, POINTER_TYPE))
13557 len = gimple_call_arg (stmt, 1);
13558 if (!validate_arg (len, INTEGER_TYPE))
13560 flag = gimple_call_arg (stmt, 2);
13561 if (!validate_arg (flag, INTEGER_TYPE))
13563 size = gimple_call_arg (stmt, 3);
13564 if (!validate_arg (size, INTEGER_TYPE))
13566 fmt = gimple_call_arg (stmt, 4);
13567 if (!validate_arg (fmt, POINTER_TYPE))
13570 if (! host_integerp (size, 1))
13573 if (! integer_all_onesp (size))
13575 if (! host_integerp (len, 1))
13577 /* If LEN is not constant, try MAXLEN too.
13578 For MAXLEN only allow optimizing into non-_ocs function
13579 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13580 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13586 if (tree_int_cst_lt (size, maxlen))
13590 if (!init_target_chars ())
13593 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13594 or if format doesn't contain % chars or is "%s". */
13595 if (! integer_zerop (flag))
13597 fmt_str = c_getstr (fmt);
13598 if (fmt_str == NULL)
13600 if (strchr (fmt_str, target_percent) != NULL
13601 && strcmp (fmt_str, target_percent_s))
13605 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13607 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13608 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13612 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13615 /* Builtins with folding operations that operate on "..." arguments
13616 need special handling; we need to store the arguments in a convenient
13617 data structure before attempting any folding. Fortunately there are
13618 only a few builtins that fall into this category. FNDECL is the
13619 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13620 result of the function call is ignored. */
13623 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13624 bool ignore ATTRIBUTE_UNUSED)
13626 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13627 tree ret = NULL_TREE;
13631 case BUILT_IN_SPRINTF_CHK:
13632 case BUILT_IN_VSPRINTF_CHK:
13633 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13636 case BUILT_IN_SNPRINTF_CHK:
13637 case BUILT_IN_VSNPRINTF_CHK:
13638 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13645 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13646 TREE_NO_WARNING (ret) = 1;
13652 /* A wrapper function for builtin folding that prevents warnings for
13653 "statement without effect" and the like, caused by removing the
13654 call node earlier than the warning is generated. */
13657 fold_call_stmt (gimple stmt, bool ignore)
13659 tree ret = NULL_TREE;
13660 tree fndecl = gimple_call_fndecl (stmt);
13661 location_t loc = gimple_location (stmt);
13663 && TREE_CODE (fndecl) == FUNCTION_DECL
13664 && DECL_BUILT_IN (fndecl)
13665 && !gimple_call_va_arg_pack_p (stmt))
13667 int nargs = gimple_call_num_args (stmt);
13669 if (avoid_folding_inline_builtin (fndecl))
13671 /* FIXME: Don't use a list in this interface. */
13672 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13674 tree arglist = NULL_TREE;
13676 for (i = nargs - 1; i >= 0; i--)
13677 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13678 return targetm.fold_builtin (fndecl, arglist, ignore);
13682 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13684 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13686 for (i = 0; i < nargs; i++)
13687 args[i] = gimple_call_arg (stmt, i);
13688 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13691 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13694 /* Propagate location information from original call to
13695 expansion of builtin. Otherwise things like
13696 maybe_emit_chk_warning, that operate on the expansion
13697 of a builtin, will use the wrong location information. */
13698 if (gimple_has_location (stmt))
13700 tree realret = ret;
13701 if (TREE_CODE (ret) == NOP_EXPR)
13702 realret = TREE_OPERAND (ret, 0);
13703 if (CAN_HAVE_LOCATION_P (realret)
13704 && !EXPR_HAS_LOCATION (realret))
13705 SET_EXPR_LOCATION (realret, loc);
13715 /* Look up the function in built_in_decls that corresponds to DECL
13716 and set ASMSPEC as its user assembler name. DECL must be a
13717 function decl that declares a builtin. */
13720 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13723 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13724 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13727 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13728 set_user_assembler_name (builtin, asmspec);
13729 switch (DECL_FUNCTION_CODE (decl))
13731 case BUILT_IN_MEMCPY:
13732 init_block_move_fn (asmspec);
13733 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13735 case BUILT_IN_MEMSET:
13736 init_block_clear_fn (asmspec);
13737 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13739 case BUILT_IN_MEMMOVE:
13740 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13742 case BUILT_IN_MEMCMP:
13743 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13745 case BUILT_IN_ABORT:
13746 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13749 if (INT_TYPE_SIZE < BITS_PER_WORD)
13751 set_user_assembler_libfunc ("ffs", asmspec);
13752 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13753 MODE_INT, 0), "ffs");