1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic-core.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
68 const char * built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_args_info (tree);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
228 /* Return true if NAME starts with __builtin_ or __sync_. */
/* NOTE(review): extract is gappy — the return type, braces, and return
   statements of this function are not visible in this view.  */
231 is_builtin_name (const char *name)
/* "__builtin_" is 10 characters long.  */
233 if (strncmp (name, "__builtin_", 10) == 0)
/* "__sync_" is 7 characters long.  */
235 if (strncmp (name, "__sync_", 7) == 0)
241 /* Return true if DECL is a function symbol representing a built-in.
   A built-in here is a FUNCTION_DECL for which DECL_BUILT_IN is set.  */
244 is_builtin_fn (tree decl)
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
255 called_as_built_in (tree node)
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
   will have in the assembler output.  */
260 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
/* Delegate the prefix check to is_builtin_name.  */
261 return is_builtin_name (name);
264 /* Return the alignment in bits of EXP, an object.
265 Don't return more than MAX_ALIGN no matter what, ALIGN is the initial
266 guessed alignment e.g. from type alignment.
   The result is the minimum of the guessed alignment, the alignment
   deduced from EXP's innermost decl/constant, and MAX_ALIGN.
   NOTE(review): extract is gappy — the return type, some declarations
   (e.g. of INNER and OFFSET) and several braces are not visible here.  */
269 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
/* Strip component references (array/field accesses) to find the base
   object and the bit position of the access within it.  */
274 if (handled_component_p (exp))
276 HOST_WIDE_INT bitsize, bitpos;
278 enum machine_mode mode;
279 int unsignedp, volatilep;
281 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
282 &mode, &unsignedp, &volatilep, true);
/* bitpos & -bitpos isolates the lowest set bit: the largest power of
   two that divides the bit position, i.e. the alignment it implies.  */
284 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
/* Walk a PLUS_EXPR chain of variable offsets one term at a time.  */
289 if (TREE_CODE (offset) == PLUS_EXPR)
291 next_offset = TREE_OPERAND (offset, 0);
292 offset = TREE_OPERAND (offset, 1);
/* A constant offset term constrains alignment by its low zero bits.  */
296 if (host_integerp (offset, 1))
298 /* Any overflow in calculating offset_bits won't change
   the alignment bits we care about, since alignment is a
   small power of two.  */
301 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
304 inner = MIN (inner, (offset_bits & -offset_bits));
/* An offset of the form x * C constrains alignment by C's low
   zero bits.  */
306 else if (TREE_CODE (offset) == MULT_EXPR
307 && host_integerp (TREE_OPERAND (offset, 1), 1))
309 /* Any overflow in calculating offset_factor won't change
   the alignment bits we care about.  */
311 unsigned offset_factor
312 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
316 inner = MIN (inner, (offset_factor & -offset_factor));
/* A fully unknown offset term: assume only byte alignment.  */
320 inner = MIN (inner, BITS_PER_UNIT);
323 offset = next_offset;
/* A CONST_DECL's alignment comes from its initializer.  */
326 if (TREE_CODE (exp) == CONST_DECL)
327 exp = DECL_INITIAL (exp);
329 && TREE_CODE (exp) != LABEL_DECL)
330 align = MIN (inner, DECL_ALIGN (exp));
331 #ifdef CONSTANT_ALIGNMENT
/* Let the target raise the alignment of constants if it wants to.  */
332 else if (CONSTANT_CLASS_P (exp))
333 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
335 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
336 || TREE_CODE (exp) == INDIRECT_REF)
337 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
339 align = MIN (align, inner);
/* Never report more than the caller's cap.  */
340 return MIN (align, max_align);
343 /* Returns true iff we can trust that alignment information has been
344 calculated properly. */
347 can_trust_pointer_alignment (void)
349 /* We rely on TER to compute accurate alignment information.
   So alignment is only trustworthy when optimizing with -ftree-ter.  */
350 return (optimize && flag_tree_ter);
353 /* Return the alignment in bits of EXP, a pointer valued expression.
354 But don't return more than MAX_ALIGN no matter what.
355 The alignment returned is, by default, the alignment of the thing that
356 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
358 Otherwise, look at the expression to see if we can do better, i.e., if the
359 expression is actually pointing at an object whose alignment is tighter.
   NOTE(review): extract is gappy — the return type, loop structure and
   several case labels of the switch are not visible here.  */
362 get_pointer_alignment (tree exp, unsigned int max_align)
364 unsigned int align, inner;
/* Without TER we cannot trust any deduced alignment (see above).  */
366 if (!can_trust_pointer_alignment ())
369 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
/* Start from the alignment of the pointed-to type, capped at MAX_ALIGN.  */
372 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
373 align = MIN (align, max_align);
377 switch (TREE_CODE (exp))
/* Conversions: look through to the operand's pointed-to type.  */
380 exp = TREE_OPERAND (exp, 0);
381 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
384 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
385 align = MIN (inner, max_align);
388 case POINTER_PLUS_EXPR:
389 /* If sum of pointer + int, restrict our maximum alignment to that
390 imposed by the integer. If not, we can't do any better than
   what we already have.  */
392 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
/* Shrink max_align until the constant addend is a multiple of it.  */
395 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
396 & (max_align / BITS_PER_UNIT - 1))
400 exp = TREE_OPERAND (exp, 0);
404 /* See what we are pointing at and look at its alignment. */
405 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
413 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
414 way, because it could contain a zero byte in the middle.
415 TREE_STRING_LENGTH is the size of the character array, not the string.
417 ONLY_VALUE should be nonzero if the result is not going to be emitted
418 into the instruction stream and zero if it is going to be expanded.
419 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
420 is returned, otherwise NULL, since
421 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
422 evaluate the side-effects.
424 The value returned is of type `ssizetype'.
426 Unfortunately, string_constant can't access the values of const char
427 arrays with initializers, so neither can we do so here.
   NOTE(review): extract is gappy — the return type, some declarations
   and early-return lines are not visible here.  */
430 c_strlen (tree src, int only_value)
433 HOST_WIDE_INT offset;
/* A conditional with equal-length arms has a known length, provided the
   condition is side-effect free (or we only need the value).  */
439 if (TREE_CODE (src) == COND_EXPR
440 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
444 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
445 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
446 if (tree_int_cst_equal (len1, len2))
/* For (e1, e2) the string value is that of e2.  */
450 if (TREE_CODE (src) == COMPOUND_EXPR
451 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
452 return c_strlen (TREE_OPERAND (src, 1), only_value);
/* Use SRC's location for any diagnostics below, if it has one.  */
454 if (EXPR_HAS_LOCATION (src))
455 loc = EXPR_LOCATION (src);
457 loc = input_location;
/* Reduce SRC to a STRING_CST plus an offset, if possible.  */
459 src = string_constant (src, &offset_node);
/* MAX is the index of the last array element (possible NUL slot).  */
463 max = TREE_STRING_LENGTH (src) - 1;
464 ptr = TREE_STRING_POINTER (src);
466 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
468 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
469 compute the offset to the following null if we don't know where to
470 start searching for it. */
473 for (i = 0; i < max; i++)
477 /* We don't know the starting offset, but we do know that the string
478 has no internal zero bytes. We can assume that the offset falls
479 within the bounds of the string; otherwise, the programmer deserves
480 what he gets. Subtract the offset from the length of the string,
481 and return that. This would perhaps not be valid if we were dealing
482 with named arrays in addition to literal string constants. */
484 return size_diffop_loc (loc, size_int (max), offset_node);
487 /* We have a known offset into the string. Start searching there for
488 a null character if we can represent it as a single HOST_WIDE_INT. */
489 if (offset_node == 0)
491 else if (! host_integerp (offset_node, 0))
494 offset = tree_low_cst (offset_node, 0);
496 /* If the offset is known to be out of bounds, warn, and call strlen at
   runtime instead.  */
498 if (offset < 0 || offset > max)
500 /* Suppress multiple warnings for propagated constant strings. */
501 if (! TREE_NO_WARNING (src))
503 warning_at (loc, 0, "offset outside bounds of constant string");
504 TREE_NO_WARNING (src) = 1;
509 /* Use strlen to search for the first zero byte. Since any strings
510 constructed with build_string will have nulls appended, we win even
511 if we get handed something like (char[4])"abcd".
513 Since OFFSET is our starting index into the string, no further
514 calculation is needed. */
515 return ssize_int (strlen (ptr + offset));
518 /* Return a char pointer for a C string if it is a string constant
519 or sum of string constant and integer constant.
   Returns NULL (not visible in this gappy extract) when SRC is not a
   recognizable string constant or the offset is out of range.  */
526 src = string_constant (src, &offset_node);
/* No offset: point at the start of the string.  */
530 if (offset_node == 0)
531 return TREE_STRING_POINTER (src);
/* Reject non-constant or out-of-bounds offsets.  */
532 else if (!host_integerp (offset_node, 1)
533 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
536 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
539 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
540 GET_MODE_BITSIZE (MODE) bits from string constant STR.
   Bytes are placed according to the target's byte/word endianness.  */
543 c_readstr (const char *str, enum machine_mode mode)
549 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
/* Pack each source byte into the right position of the two-word
   accumulator C[], honoring target endianness.  */
554 for (i = 0; i < GET_MODE_SIZE (mode); i++)
557 if (WORDS_BIG_ENDIAN)
558 j = GET_MODE_SIZE (mode) - i - 1;
/* If byte and word order differ, flip byte order within each word.  */
559 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
560 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
561 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
563 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT)
/* NOTE(review): the line above is transcribed from a gappy extract; the
   bit-position scaling of J is not visible here.  */;
566 ch = (unsigned char) str[i];
567 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
569 return immed_double_const (c[0], c[1], mode);
572 /* Cast a target constant CST to target CHAR and if that value fits into
573 host char type, return zero and put that value into variable pointed to by
   P.  Nonzero return (not visible in this gappy extract) means failure.  */
577 target_char_cast (tree cst, char *p)
579 unsigned HOST_WIDE_INT val, hostval;
/* Must be a host-representable unsigned constant, and the target char
   must fit in a HOST_WIDE_INT.  */
581 if (!host_integerp (cst, 1)
582 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
585 val = tree_low_cst (cst, 1);
/* Truncate to the target's char width.  */
586 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
587 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
/* Truncate a copy to the host's char width for comparison.  */
590 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
591 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
600 /* Similar to save_expr, but assumes that arbitrary code is not executed
601 in between the multiple evaluations. In particular, we assume that a
602 non-addressable local variable will not be modified. */
605 builtin_save_expr (tree exp)
/* A non-addressable PARM_DECL or non-static VAR_DECL cannot change
   between evaluations under the above assumption, so no SAVE_EXPR is
   needed for it.  */
607 if (TREE_ADDRESSABLE (exp) == 0
608 && (TREE_CODE (exp) == PARM_DECL
609 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
612 return save_expr (exp);
615 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
616 times to get the address of either a higher stack frame, or a return
617 address located within it (depending on FNDECL_CODE).
   NOTE(review): extract is gappy — the return type, some #else/#endif
   lines and intermediate statements are not visible here.  */
620 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
/* Targets may provide the initial frame address directly.  */
624 #ifdef INITIAL_FRAME_ADDRESS_RTX
625 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
629 /* For a zero count with __builtin_return_address, we don't care what
630 frame address we return, because target-specific definitions will
631 override us. Therefore frame pointer elimination is OK, and using
632 the soft frame pointer is OK.
634 For a nonzero count, or a zero count with __builtin_frame_address,
635 we require a stable offset from the current frame pointer to the
636 previous one, so we must use the hard frame pointer, and
637 we must disable frame pointer elimination. */
638 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
639 tem = frame_pointer_rtx;
642 tem = hard_frame_pointer_rtx;
644 /* Tell reload not to eliminate the frame pointer. */
645 crtl->accesses_prior_frames = 1;
649 /* Some machines need special handling before we can access
650 arbitrary frames. For example, on the SPARC, we must first flush
651 all register windows to the stack. */
652 #ifdef SETUP_FRAME_ADDRESSES
654 SETUP_FRAME_ADDRESSES ();
657 /* On the SPARC, the return address is not in the frame, it is in a
658 register. There is no way to access it off of the current frame
659 pointer, but it can be accessed off the previous frame pointer by
660 reading the value from the register window save area. */
661 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
662 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
666 /* Scan back COUNT frames to the specified frame. */
667 for (i = 0; i < count; i++)
669 /* Assume the dynamic chain pointer is in the word that the
670 frame address points to, unless otherwise specified. */
671 #ifdef DYNAMIC_CHAIN_ADDRESS
672 tem = DYNAMIC_CHAIN_ADDRESS (tem);
/* Load the previous frame pointer through memory.  */
674 tem = memory_address (Pmode, tem);
675 tem = gen_frame_mem (Pmode, tem);
676 tem = copy_to_reg (tem);
679 /* For __builtin_frame_address, return what we've got. But, on
680 the SPARC for example, we may have to add a bias. */
681 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
682 #ifdef FRAME_ADDR_RTX
683 return FRAME_ADDR_RTX (tem);
688 /* For __builtin_return_address, get the return address from that frame. */
689 #ifdef RETURN_ADDR_RTX
690 tem = RETURN_ADDR_RTX (count, tem);
/* Default: the return address sits one word past the frame address.  */
692 tem = memory_address (Pmode,
693 plus_constant (tem, GET_MODE_SIZE (Pmode)));
694 tem = gen_frame_mem (Pmode, tem);
699 /* Alias set used for setjmp buffer. */
700 static alias_set_type setjmp_alias_set = -1;
702 /* Construct the leading half of a __builtin_setjmp call. Control will
703 return to RECEIVER_LABEL. This is also called directly by the SJLJ
704 exception handling code.
   Buffer layout: word 0 = frame pointer, word 1 = receiver label,
   remaining words = machine-dependent stack save area.  */
707 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
709 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* Lazily create the alias set shared by all setjmp buffer accesses.  */
713 if (setjmp_alias_set == -1)
714 setjmp_alias_set = new_alias_set ();
716 buf_addr = convert_memory_address (Pmode, buf_addr);
718 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
720 /* We store the frame pointer and the address of receiver_label in
721 the buffer and use the rest of it for the stack save area, which
722 is machine-dependent. */
724 mem = gen_rtx_MEM (Pmode, buf_addr);
725 set_mem_alias_set (mem, setjmp_alias_set);
726 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
/* NOTE(review): the next statement ends with a comma operator rather
   than a semicolon, chaining it with the following set_mem_alias_set
   call.  Behavior is unchanged, but it should be a semicolon.  */
728 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
729 set_mem_alias_set (mem, setjmp_alias_set);
/* Word 1: address control returns to after a longjmp.  */
731 emit_move_insn (validize_mem (mem),
732 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
/* Words 2..: save the stack pointer (mode is target-dependent).  */
734 stack_save = gen_rtx_MEM (sa_mode,
735 plus_constant (buf_addr,
736 2 * GET_MODE_SIZE (Pmode)));
737 set_mem_alias_set (stack_save, setjmp_alias_set);
738 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
740 /* If there is further processing to do, do it. */
741 #ifdef HAVE_builtin_setjmp_setup
742 if (HAVE_builtin_setjmp_setup)
743 emit_insn (gen_builtin_setjmp_setup (buf_addr));
746 /* Tell optimize_save_area_alloca that extra work is going to
747 need to go on during alloca. */
748 cfun->calls_setjmp = 1;
750 /* We have a nonlocal label. */
751 cfun->has_nonlocal_label = 1;
754 /* Construct the trailing part of a __builtin_setjmp call. This is
755 also called directly by the SJLJ exception handling code.
   Restores the frame pointer (and possibly the argument pointer) after
   control arrives here via a longjmp or nonlocal goto.  */
758 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
762 /* Clobber the FP when we get here, so we have to make sure it's
763 marked as used by this function. */
764 emit_use (hard_frame_pointer_rtx)
/* NOTE(review): transcribed from a gappy extract; intervening lines of
   the original are not visible here.  */;
766 /* Mark the static chain as clobbered here so life information
767 doesn't get messed up for it. */
768 chain = targetm.calls.static_chain (current_function_decl, true);
769 if (chain && REG_P (chain))
770 emit_clobber (chain);
772 /* Now put in the code to restore the frame pointer, and argument
773 pointer, if needed. */
774 #ifdef HAVE_nonlocal_goto
775 if (! HAVE_nonlocal_goto)
/* Copy the hard FP into the virtual stack-vars register by hand.  */
778 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
779 /* This might change the hard frame pointer in ways that aren't
780 apparent to early optimization passes, so force a clobber. */
781 emit_clobber (hard_frame_pointer_rtx);
784 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
785 if (fixed_regs[ARG_POINTER_REGNUM])
787 #ifdef ELIMINABLE_REGS
/* If the argument pointer can be eliminated in favor of the frame
   pointer, we don't need to restore it; assume it was eliminated.  */
789 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
791 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
792 if (elim_regs[i].from == ARG_POINTER_REGNUM
793 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
796 if (i == ARRAY_SIZE (elim_regs))
799 /* Now restore our arg pointer from the address at which it
800 was saved in our stack frame. */
801 emit_move_insn (crtl->args.internal_arg_pointer,
802 copy_to_reg (get_arg_pointer_save_area ()));
/* Give the target a chance to emit extra receiver code.  */
807 #ifdef HAVE_builtin_setjmp_receiver
808 if (HAVE_builtin_setjmp_receiver)
809 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
812 #ifdef HAVE_nonlocal_goto_receiver
813 if (HAVE_nonlocal_goto_receiver)
814 emit_insn (gen_nonlocal_goto_receiver ());
819 /* We must not allow the code we just generated to be reordered by
820 scheduling. Specifically, the update of the frame pointer must
821 happen immediately, not later. */
822 emit_insn (gen_blockage ());
825 /* __builtin_longjmp is passed a pointer to an array of five words (not
826 all will be used on all machines). It operates similarly to the C
827 library function of the same name, but is more efficient. Much of
828 the code below is copied from the handling of non-local gotos.
   BUF_ADDR is the setjmp buffer; VALUE must be const1_rtx (asserted
   below), matching what builtin_setjmp returns.  */
831 expand_builtin_longjmp (rtx buf_addr, rtx value)
833 rtx fp, lab, stack, insn, last;
834 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
836 /* DRAP is needed for stack realign if longjmp is expanded to current
   function.  */
838 if (SUPPORTS_STACK_ALIGNMENT)
839 crtl->need_drap = true;
/* Lazily create the shared setjmp-buffer alias set.  */
841 if (setjmp_alias_set == -1)
842 setjmp_alias_set = new_alias_set ();
844 buf_addr = convert_memory_address (Pmode, buf_addr);
846 buf_addr = force_reg (Pmode, buf_addr);
848 /* We require that the user must pass a second argument of 1, because
849 that is what builtin_setjmp will return. */
850 gcc_assert (value == const1_rtx);
852 last = get_last_insn ();
/* Prefer a target-provided builtin_longjmp expansion if one exists.  */
853 #ifdef HAVE_builtin_longjmp
854 if (HAVE_builtin_longjmp)
855 emit_insn (gen_builtin_longjmp (buf_addr));
/* Otherwise unpack the buffer: word 0 = FP, word 1 = label,
   words 2.. = saved stack pointer.  */
859 fp = gen_rtx_MEM (Pmode, buf_addr);
860 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
861 GET_MODE_SIZE (Pmode)));
863 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
864 2 * GET_MODE_SIZE (Pmode)));
865 set_mem_alias_set (fp, setjmp_alias_set);
866 set_mem_alias_set (lab, setjmp_alias_set);
867 set_mem_alias_set (stack, setjmp_alias_set);
869 /* Pick up FP, label, and SP from the block and jump. This code is
870 from expand_goto in stmt.c; see there for detailed comments. */
871 #ifdef HAVE_nonlocal_goto
872 if (HAVE_nonlocal_goto)
873 /* We have to pass a value to the nonlocal_goto pattern that will
874 get copied into the static_chain pointer, but it does not matter
875 what that value is, because builtin_setjmp does not use it. */
876 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
/* Generic fallback: restore FP and SP manually, then jump.  */
880 lab = copy_to_reg (lab);
/* Clobber memory so the scheduler won't move stores past the jump.  */
882 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
883 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
885 emit_move_insn (hard_frame_pointer_rtx, fp);
886 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
888 emit_use (hard_frame_pointer_rtx);
889 emit_use (stack_pointer_rtx);
890 emit_indirect_jump (lab);
894 /* Search backwards and mark the jump insn as a non-local goto.
895 Note that this precludes the use of __builtin_longjmp to a
896 __builtin_setjmp target in the same function. However, we've
897 already cautioned the user that these functions are for
898 internal exception handling use only. */
899 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
/* We must find the jump before reaching the insns emitted earlier.  */
901 gcc_assert (insn != last);
905 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
908 else if (CALL_P (insn))
913 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
914 and the address of the save area.
   NOTE(review): extract is gappy — the return type, braces and some
   early-return lines are not visible here.  */
917 expand_builtin_nonlocal_goto (tree exp)
919 tree t_label, t_save_area;
920 rtx r_label, r_save_area, r_fp, r_sp, insn;
/* Expect exactly two pointer arguments.  */
922 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
925 t_label = CALL_EXPR_ARG (exp, 0);
926 t_save_area = CALL_EXPR_ARG (exp, 1);
928 r_label = expand_normal (t_label);
929 r_label = convert_memory_address (Pmode, r_label);
930 r_save_area = expand_normal (t_save_area);
931 r_save_area = convert_memory_address (Pmode, r_save_area);
932 /* Copy the address of the save location to a register just in case it was based
933 on the frame pointer. */
934 r_save_area = copy_to_reg (r_save_area);
/* Save area layout: word 0 = frame pointer, following words = SP.  */
935 r_fp = gen_rtx_MEM (Pmode, r_save_area);
936 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
937 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
939 crtl->has_nonlocal_goto = 1;
941 #ifdef HAVE_nonlocal_goto
942 /* ??? We no longer need to pass the static chain value, afaik. */
943 if (HAVE_nonlocal_goto)
944 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
/* Generic fallback, mirroring expand_builtin_longjmp above.  */
948 r_label = copy_to_reg (r_label);
950 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
951 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
953 /* Restore frame pointer for containing function.
954 This sets the actual hard register used for the frame pointer
955 to the location of the function's incoming static chain info.
956 The non-local goto handler will then adjust it to contain the
957 proper value and reload the argument pointer, if needed. */
958 emit_move_insn (hard_frame_pointer_rtx, r_fp);
959 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
961 /* USE of hard_frame_pointer_rtx added for consistency;
962 not clear if really needed. */
963 emit_use (hard_frame_pointer_rtx);
964 emit_use (stack_pointer_rtx);
966 /* If the architecture is using a GP register, we must
967 conservatively assume that the target function makes use of it.
968 The prologue of functions with nonlocal gotos must therefore
969 initialize the GP register to the appropriate value, and we
970 must then make sure that this value is live at the point
971 of the jump. (Note that this doesn't necessarily apply
972 to targets with a nonlocal_goto pattern; they are free
973 to implement it in their own way. Note also that this is
974 a no-op if the GP register is a global invariant.) */
975 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
976 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
977 emit_use (pic_offset_table_rtx);
979 emit_indirect_jump (r_label);
982 /* Search backwards to the jump insn and mark it as a
   non-local goto.  */
984 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
988 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
991 else if (CALL_P (insn))
998 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
999 (not all will be used on all machines) that was passed to __builtin_setjmp.
1000 It updates the stack pointer in that block to correspond to the current
   stack pointer.  */
1004 expand_builtin_update_setjmp_buf (rtx buf_addr)
/* Default save-area mode; refined below per target.  */
1006 enum machine_mode sa_mode = Pmode;
1010 #ifdef HAVE_save_stack_nonlocal
1011 if (HAVE_save_stack_nonlocal)
/* Use the mode the target's save_stack_nonlocal pattern expects.  */
1012 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1014 #ifdef STACK_SAVEAREA_MODE
1015 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* The stack save slot is at word offset 2 in the setjmp buffer,
   matching the layout written by expand_builtin_setjmp_setup.  */
1019 = gen_rtx_MEM (sa_mode,
1022 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
/* Let the target emit any extra setjmp bookkeeping.  */
1026 emit_insn (gen_setjmp ());
1029 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1032 /* Expand a call to __builtin_prefetch. For a target that does not support
1033 data prefetch, evaluate the memory address argument in case it has side
1037 expand_builtin_prefetch (tree exp)
1039 tree arg0, arg1, arg2;
/* The first argument must be a pointer; otherwise silently expand nothing.  */
1043 if (!validate_arglist (exp, POINTER_TYPE, 0))
1046 arg0 = CALL_EXPR_ARG (exp, 0);
1048 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1049 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1051 nargs = call_expr_nargs (exp);
1053 arg1 = CALL_EXPR_ARG (exp, 1);
1055 arg1 = integer_zero_node;
1057 arg2 = CALL_EXPR_ARG (exp, 2);
1059 arg2 = build_int_cst (NULL_TREE, 3);
1061 /* Argument 0 is an address. */
1062 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1064 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1065 if (TREE_CODE (arg1) != INTEGER_CST)
1067 error ("second argument to %<__builtin_prefetch%> must be a constant");
/* Recover by treating the bad argument as the default (read).  */
1068 arg1 = integer_zero_node;
1070 op1 = expand_normal (arg1);
1071 /* Argument 1 must be either zero or one. */
1072 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1074 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1079 /* Argument 2 (locality) must be a compile-time constant int. */
1080 if (TREE_CODE (arg2) != INTEGER_CST)
1082 error ("third argument to %<__builtin_prefetch%> must be a constant");
1083 arg2 = integer_zero_node;
1085 op2 = expand_normal (arg2);
1086 /* Argument 2 must be 0, 1, 2, or 3. */
1087 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1089 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
/* Emit a real prefetch insn only when the target provides the pattern.  */
1093 #ifdef HAVE_prefetch
/* Force the address into a form (mode and register) accepted by the
   prefetch pattern's operand predicate.  */
1096 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1098 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1099 || (GET_MODE (op0) != Pmode))
1101 op0 = convert_memory_address (Pmode, op0);
1102 op0 = force_reg (Pmode, op0);
1104 emit_insn (gen_prefetch (op0, op1, op2));
1108 /* Don't do anything with direct references to volatile memory, but
1109 generate code to handle other side effects. */
1110 if (!MEM_P (op0) && side_effects_p (op0))
1114 /* Get a MEM rtx for expression EXP which is the address of an operand
1115 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1116 the maximum length of the block of memory that might be accessed or
1120 get_memory_rtx (tree exp, tree len)
1122 tree orig_exp = exp;
1126 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1127 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1128 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1129 exp = TREE_OPERAND (exp, 0);
/* Expand the ORIGINAL expression (including any SAVE_EXPR wrapper) to get
   the address; EXP from here on is used only to derive memory attributes.  */
1131 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1132 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1134 /* Get an expression we can use to find the attributes to assign to MEM.
1135 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1136 we can. First remove any nops. */
1137 while (CONVERT_EXPR_P (exp)
1138 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1139 exp = TREE_OPERAND (exp, 0);
/* &obj + CST: peel off the ADDR_EXPR and remember the byte offset OFF
   so it can be re-applied to MEM below.  */
1142 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1143 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1144 && host_integerp (TREE_OPERAND (exp, 1), 0)
1145 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1146 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1147 else if (TREE_CODE (exp) == ADDR_EXPR)
1148 exp = TREE_OPERAND (exp, 0);
1149 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1150 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1154 /* Honor attributes derived from exp, except for the alias set
1155 (as builtin stringops may alias with anything) and the size
1156 (as stringops may access multiple array elements). */
1159 set_mem_attributes (mem, exp, 0);
/* Re-apply the constant offset peeled off the POINTER_PLUS_EXPR above.  */
1162 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1164 /* Allow the string and memory builtins to overflow from one
1165 field into another, see http://gcc.gnu.org/PR23561.
1166 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1167 memory accessed by the string or memory builtin will fit
1168 within the field. */
1169 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1171 tree mem_expr = MEM_EXPR (mem);
/* OFFSET/LENGTH of -1 mean "unknown" throughout the loop below.  */
1172 HOST_WIDE_INT offset = -1, length = -1;
/* Strip wrappers to reach the innermost COMPONENT_REF.  */
1175 while (TREE_CODE (inner) == ARRAY_REF
1176 || CONVERT_EXPR_P (inner)
1177 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1178 || TREE_CODE (inner) == SAVE_EXPR)
1179 inner = TREE_OPERAND (inner, 0);
1181 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1183 if (MEM_OFFSET (mem)
1184 && CONST_INT_P (MEM_OFFSET (mem)))
1185 offset = INTVAL (MEM_OFFSET (mem));
/* Only a known constant LEN lets us prove the access stays in a field.  */
1187 if (offset >= 0 && len && host_integerp (len, 0))
1188 length = tree_low_cst (len, 0);
/* Walk outward through nested COMPONENT_REFs, dropping any field
   reference that the access is not provably contained in.  */
1190 while (TREE_CODE (inner) == COMPONENT_REF)
1192 tree field = TREE_OPERAND (inner, 1);
1193 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1194 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1196 /* Bitfields are generally not byte-addressable. */
1197 gcc_assert (!DECL_BIT_FIELD (field)
1198 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1199 % BITS_PER_UNIT) == 0
1200 && host_integerp (DECL_SIZE (field), 0)
1201 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1202 % BITS_PER_UNIT) == 0));
1204 /* If we can prove that the memory starting at XEXP (mem, 0) and
1205 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1206 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1207 fields without DECL_SIZE_UNIT like flexible array members. */
1209 && DECL_SIZE_UNIT (field)
1210 && host_integerp (DECL_SIZE_UNIT (field), 0))
1213 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1216 && offset + length <= size)
/* Containment could not be proven; translate OFFSET to be relative
   to the enclosing record and move one level outward.  */
1221 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1222 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1223 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1231 mem_expr = TREE_OPERAND (mem_expr, 0);
1232 inner = TREE_OPERAND (inner, 0);
1235 if (mem_expr == NULL)
1237 if (mem_expr != MEM_EXPR (mem))
1239 set_mem_expr (mem, mem_expr);
1240 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
/* Stringops may alias anything and access multiple elements, so clear
   the alias set and the recorded size (see comment above).  */
1243 set_mem_alias_set (mem, 0);
1244 set_mem_size (mem, NULL_RTX);
1250 /* Built-in functions to perform an untyped call and return. */
1252 /* For each register that may be used for calling a function, this
1253 gives a mode used to copy the register's value. VOIDmode indicates
1254 the register is not used for calling a function. If the machine
1255 has register windows, this gives only the outbound registers.
1256 INCOMING_REGNO gives the corresponding inbound register. */
/* Filled in lazily by apply_args_size.  */
1257 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1259 /* For each register that may be used for returning values, this gives
1260 a mode used to copy the register's value. VOIDmode indicates the
1261 register is not used for returning values. If the machine has
1262 register windows, this gives only the outbound registers.
1263 INCOMING_REGNO gives the corresponding inbound register. */
/* Filled in lazily by apply_result_size.  */
1264 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1266 /* Return the size required for the block returned by __builtin_apply_args,
1267 and initialize apply_args_mode. */
1270 apply_args_size (void)
/* Cached result: -1 means "not yet computed".  */
1272 static int size = -1;
1275 enum machine_mode mode;
1277 /* The values computed by this function never change. */
1280 /* The first value is the incoming arg-pointer. */
1281 size = GET_MODE_SIZE (Pmode);
1283 /* The second value is the structure value address unless this is
1284 passed as an "invisible" first argument. */
1285 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1286 size += GET_MODE_SIZE (Pmode);
/* Lay out a slot for every hard register that can carry an argument,
   padding each to its mode's natural alignment.  */
1288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1289 if (FUNCTION_ARG_REGNO_P (regno))
1290 mode = reg_raw_mode[regno];
1293 gcc_assert (mode != VOIDmode);
1295 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1296 if (size % align != 0)
1297 size = CEIL (size, align) * align;
1298 size += GET_MODE_SIZE (mode);
1299 apply_args_mode[regno] = mode;
/* Registers that cannot carry arguments are marked VOIDmode.  */
1303 apply_args_mode[regno] = VOIDmode;
1309 /* Return the size required for the block returned by __builtin_apply,
1310 and initialize apply_result_mode. */
1313 apply_result_size (void)
/* Cached result: -1 means "not yet computed".  */
1315 static int size = -1;
1317 enum machine_mode mode;
1319 /* The values computed by this function never change. */
/* Lay out a slot for every hard register that can hold a return value,
   padding each to its mode's natural alignment.  */
1324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325 if (targetm.calls.function_value_regno_p (regno))
1327 mode = reg_raw_mode[regno];
1329 gcc_assert (mode != VOIDmode);
1331 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1332 if (size % align != 0)
1333 size = CEIL (size, align) * align;
1334 size += GET_MODE_SIZE (mode);
1335 apply_result_mode[regno] = mode;
/* Registers that cannot hold return values are marked VOIDmode.  */
1338 apply_result_mode[regno] = VOIDmode;
1340 /* Allow targets that use untyped_call and untyped_return to override
1341 the size so that machine-specific information can be stored here. */
1342 #ifdef APPLY_RESULT_SIZE
1343 size = APPLY_RESULT_SIZE;
1349 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1350 /* Create a vector describing the result block RESULT. If SAVEP is true,
1351 the result block is used to save the values; otherwise it is used to
1352 restore the values. */
1355 result_vector (int savep, rtx result)
1357 int regno, size, align, nelts;
1358 enum machine_mode mode;
1360 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
/* For each return register (per apply_result_mode), emit either
   (set mem reg) to save or (set reg mem) to restore, with the MEM slot
   laid out exactly as in apply_result_size.  */
1363 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1364 if ((mode = apply_result_mode[regno]) != VOIDmode)
1366 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1367 if (size % align != 0)
1368 size = CEIL (size, align) * align;
/* When restoring, the value lands in the corresponding inbound register.  */
1369 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1370 mem = adjust_address (result, mode, size);
1371 savevec[nelts++] = (savep
1372 ? gen_rtx_SET (VOIDmode, mem, reg)
1373 : gen_rtx_SET (VOIDmode, reg, mem));
1374 size += GET_MODE_SIZE (mode);
1376 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1378 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1380 /* Save the state required to perform an untyped call with the same
1381 arguments as were passed to the current function. */
1384 expand_builtin_apply_args_1 (void)
1387 int size, align, regno;
1388 enum machine_mode mode;
1389 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1391 /* Create a block where the arg-pointer, structure value address,
1392 and argument registers can be saved. */
1393 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1395 /* Walk past the arg-pointer and structure value address. */
1396 size = GET_MODE_SIZE (Pmode);
1397 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1398 size += GET_MODE_SIZE (Pmode);
1400 /* Save each register used in calling a function to the block. */
/* Layout here must mirror apply_args_size exactly.  */
1401 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1402 if ((mode = apply_args_mode[regno]) != VOIDmode)
1404 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1405 if (size % align != 0)
1406 size = CEIL (size, align) * align;
/* Use the inbound register number — we are saving what was passed in.  */
1408 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1410 emit_move_insn (adjust_address (registers, mode, size), tem);
1411 size += GET_MODE_SIZE (mode);
1414 /* Save the arg pointer to the block. */
1415 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1416 #ifdef STACK_GROWS_DOWNWARD
1417 /* We need the pointer as the caller actually passed them to us, not
1418 as we might have pretended they were passed. Make sure it's a valid
1419 operand, as emit_move_insn isn't expected to handle a PLUS. */
1421 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1424 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1426 size = GET_MODE_SIZE (Pmode);
1428 /* Save the structure value address unless this is passed as an
1429 "invisible" first argument. */
1430 if (struct_incoming_value)
1432 emit_move_insn (adjust_address (registers, Pmode, size),
1433 copy_to_reg (struct_incoming_value));
1434 size += GET_MODE_SIZE (Pmode);
1437 /* Return the address of the block. */
1438 return copy_addr_to_reg (XEXP (registers, 0));
1441 /* __builtin_apply_args returns block of memory allocated on
1442 the stack into which is stored the arg pointer, structure
1443 value address, static chain, and all the registers that might
1444 possibly be used in performing a function call. The code is
1445 moved to the start of the function so the incoming values are
1449 expand_builtin_apply_args (void)
1451 /* Don't do __builtin_apply_args more than once in a function.
1452 Save the result of the first call and reuse it. */
1453 if (apply_args_value != 0)
1454 return apply_args_value;
1456 /* When this function is called, it means that registers must be
1457 saved on entry to this function. So we migrate the
1458 call to the first insn of this function. */
1463 temp = expand_builtin_apply_args_1 ();
/* Cache for subsequent calls in this function (checked above).  */
1467 apply_args_value = temp;
1469 /* Put the insns after the NOTE that starts the function.
1470 If this is inside a start_sequence, make the outer-level insn
1471 chain current, so the code is placed at the start of the
1472 function. If internal_arg_pointer is a non-virtual pseudo,
1473 it needs to be placed after the function that initializes
1475 push_topmost_sequence ();
1476 if (REG_P (crtl->args.internal_arg_pointer)
1477 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1478 emit_insn_before (seq, parm_birth_insn)
1480 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1481 pop_topmost_sequence ();
1486 /* Perform an untyped call and save the state required to perform an
1487 untyped return of whatever value was returned by the given function. */
1490 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1492 int size, align, regno;
1493 enum machine_mode mode;
1494 rtx incoming_args, result, reg, dest, src, call_insn;
1495 rtx old_stack_level = 0;
1496 rtx call_fusage = 0;
1497 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1499 arguments = convert_memory_address (Pmode, arguments);
1501 /* Create a block where the return registers can be saved. */
1502 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1504 /* Fetch the arg pointer from the ARGUMENTS block. */
1505 incoming_args = gen_reg_rtx (Pmode);
1506 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
/* On upward-growing stacks the saved pointer is past the arguments;
   step back by ARGSIZE to reach their start.  */
1507 #ifndef STACK_GROWS_DOWNWARD
1508 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1509 incoming_args, 0, OPTAB_LIB_WIDEN);
1512 /* Push a new argument block and copy the arguments. Do not allow
1513 the (potential) memcpy call below to interfere with our stack
1515 do_pending_stack_adjust ();
1518 /* Save the stack with nonlocal if available. */
1519 #ifdef HAVE_save_stack_nonlocal
1520 if (HAVE_save_stack_nonlocal)
1521 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1524 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1526 /* Allocate a block of memory onto the stack and copy the memory
1527 arguments to the outgoing arguments address. */
1528 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1530 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1531 may have already set current_function_calls_alloca to true.
1532 current_function_calls_alloca won't be set if argsize is zero,
1533 so we have to guarantee need_drap is true here. */
1534 if (SUPPORTS_STACK_ALIGNMENT)
1535 crtl->need_drap = true;
1537 dest = virtual_outgoing_args_rtx;
/* On upward-growing stacks the copy destination starts ARGSIZE below
   the outgoing-args pointer.  */
1538 #ifndef STACK_GROWS_DOWNWARD
1539 if (CONST_INT_P (argsize))
1540 dest = plus_constant (dest, -INTVAL (argsize));
1542 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1544 dest = gen_rtx_MEM (BLKmode, dest);
1545 set_mem_align (dest, PARM_BOUNDARY);
1546 src = gen_rtx_MEM (BLKmode, incoming_args);
1547 set_mem_align (src, PARM_BOUNDARY);
/* Copy the caller's saved argument block onto the new stack area.  */
1548 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1550 /* Refer to the argument block. */
1552 arguments = gen_rtx_MEM (BLKmode, arguments);
1553 set_mem_align (arguments, PARM_BOUNDARY);
1555 /* Walk past the arg-pointer and structure value address. */
1556 size = GET_MODE_SIZE (Pmode);
1558 size += GET_MODE_SIZE (Pmode);
1560 /* Restore each of the registers previously saved. Make USE insns
1561 for each of these registers for use in making the call. */
/* Layout must mirror apply_args_size / expand_builtin_apply_args_1.  */
1562 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1563 if ((mode = apply_args_mode[regno]) != VOIDmode)
1565 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1566 if (size % align != 0)
1567 size = CEIL (size, align) * align;
1568 reg = gen_rtx_REG (mode, regno);
1569 emit_move_insn (reg, adjust_address (arguments, mode, size));
1570 use_reg (&call_fusage, reg);
1571 size += GET_MODE_SIZE (mode);
1574 /* Restore the structure value address unless this is passed as an
1575 "invisible" first argument. */
1576 size = GET_MODE_SIZE (Pmode);
1579 rtx value = gen_reg_rtx (Pmode);
1580 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1581 emit_move_insn (struct_value, value);
1582 if (REG_P (struct_value))
1583 use_reg (&call_fusage, struct_value);
1584 size += GET_MODE_SIZE (Pmode);
1587 /* All arguments and registers used for the call are set up by now! */
1588 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1590 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1591 and we don't want to load it into a register as an optimization,
1592 because prepare_call_address already did it if it should be done. */
1593 if (GET_CODE (function) != SYMBOL_REF)
1594 function = memory_address (FUNCTION_MODE, function);
1596 /* Generate the actual call instruction and save the return value. */
1597 #ifdef HAVE_untyped_call
1598 if (HAVE_untyped_call)
1599 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1600 result, result_vector (1, result)));
1603 #ifdef HAVE_call_value
1604 if (HAVE_call_value)
1608 /* Locate the unique return register. It is not possible to
1609 express a call that sets more than one return register using
1610 call_value; use untyped_call for that. In fact, untyped_call
1611 only needs to save the return registers in the given block. */
1612 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1613 if ((mode = apply_result_mode[regno]) != VOIDmode)
1615 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1617 valreg = gen_rtx_REG (mode, regno);
1620 emit_call_insn (GEN_CALL_VALUE (valreg,
1621 gen_rtx_MEM (FUNCTION_MODE, function),
1622 const0_rtx, NULL_RTX, const0_rtx));
1624 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1630 /* Find the CALL insn we just emitted, and attach the register usage
1632 call_insn = last_call_insn ();
1633 add_function_usage_to (call_insn, call_fusage);
1635 /* Restore the stack. */
/* Must pair with the matching emit_stack_save above.  */
1636 #ifdef HAVE_save_stack_nonlocal
1637 if (HAVE_save_stack_nonlocal)
1638 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1641 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1645 /* Return the address of the result block. */
1646 result = copy_addr_to_reg (XEXP (result, 0));
1647 return convert_memory_address (ptr_mode, result);
1650 /* Perform an untyped return. */
1653 expand_builtin_return (rtx result)
1655 int size, align, regno;
1656 enum machine_mode mode;
1658 rtx call_fusage = 0;
1660 result = convert_memory_address (Pmode, result);
/* Ensure apply_result_mode[] is initialized before it is read below.  */
1662 apply_result_size ();
1663 result = gen_rtx_MEM (BLKmode, result);
/* Prefer the target's untyped_return pattern when available.  */
1665 #ifdef HAVE_untyped_return
1666 if (HAVE_untyped_return)
1668 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1674 /* Restore the return value and note that each value is used. */
/* Fallback path: reload each return register from the result block,
   using the same layout as apply_result_size.  */
1676 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1677 if ((mode = apply_result_mode[regno]) != VOIDmode)
1679 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1680 if (size % align != 0)
1681 size = CEIL (size, align) * align;
1682 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1683 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate USE insns in a separate sequence so they can be emitted
   together just before the return.  */
1685 push_to_sequence (call_fusage);
1687 call_fusage = get_insns ();
1689 size += GET_MODE_SIZE (mode);
1692 /* Put the USE insns before the return. */
1693 emit_insn (call_fusage);
1695 /* Return whatever values was restored by jumping directly to the end
1697 expand_naked_return ();
1700 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
/* Map a front-end TREE_CODE of TYPE to the corresponding type_class
   enumerator used by __builtin_classify_type.  */
1702 static enum type_class
1703 type_to_class (tree type)
1705 switch (TREE_CODE (type))
1707 case VOID_TYPE: return void_type_class;
1708 case INTEGER_TYPE: return integer_type_class;
1709 case ENUMERAL_TYPE: return enumeral_type_class;
1710 case BOOLEAN_TYPE: return boolean_type_class;
1711 case POINTER_TYPE: return pointer_type_class;
1712 case REFERENCE_TYPE: return reference_type_class;
1713 case OFFSET_TYPE: return offset_type_class;
1714 case REAL_TYPE: return real_type_class;
1715 case COMPLEX_TYPE: return complex_type_class;
1716 case FUNCTION_TYPE: return function_type_class;
1717 case METHOD_TYPE: return method_type_class;
1718 case RECORD_TYPE: return record_type_class;
/* QUAL_UNION_TYPE is classified like a plain union.  */
1720 case QUAL_UNION_TYPE: return union_type_class;
/* Arrays of characters (TYPE_STRING_FLAG) classify as strings.  */
1721 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1722 ? string_type_class : array_type_class);
1723 case LANG_TYPE: return lang_type_class;
1724 default: return no_type_class;
1728 /* Expand a call EXP to __builtin_classify_type. */
1731 expand_builtin_classify_type (tree exp)
/* With an argument, classify its type; with none, return no_type_class.  */
1733 if (call_expr_nargs (exp))
1734 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1735 return GEN_INT (no_type_class);
1738 /* This helper macro, meant to be used in mathfn_built_in below,
1739 determines which among a set of three builtin math functions is
1740 appropriate for a given type mode. The `F' and `L' cases are
1741 automatically generated from the `double' case. */
/* Sets FCODE/FCODEF/FCODEL (double/float/long double variants), which
   must be in scope at the expansion site.  */
1742 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1743 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1744 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1745 fcodel = BUILT_IN_MATHFN##L ; break;
1746 /* Similar to above, but appends _R after any F/L suffix. */
1747 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1748 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1749 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1750 fcodel = BUILT_IN_MATHFN##L_R ; break;
1752 /* Return mathematic function equivalent to FN but operating directly
1753 on TYPE, if available. If IMPLICIT is true find the function in
1754 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1755 can't do the conversion, return zero. */
1758 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1760 tree const *const fn_arr
1761 = implicit ? implicit_built_in_decls : built_in_decls;
1762 enum built_in_function fcode, fcodef, fcodel;
/* Each CASE_MATHFN expands to the double/float/long-double triple of
   cases and records the matching codes in fcode/fcodef/fcodel.  */
1766 CASE_MATHFN (BUILT_IN_ACOS)
1767 CASE_MATHFN (BUILT_IN_ACOSH)
1768 CASE_MATHFN (BUILT_IN_ASIN)
1769 CASE_MATHFN (BUILT_IN_ASINH)
1770 CASE_MATHFN (BUILT_IN_ATAN)
1771 CASE_MATHFN (BUILT_IN_ATAN2)
1772 CASE_MATHFN (BUILT_IN_ATANH)
1773 CASE_MATHFN (BUILT_IN_CBRT)
1774 CASE_MATHFN (BUILT_IN_CEIL)
1775 CASE_MATHFN (BUILT_IN_CEXPI)
1776 CASE_MATHFN (BUILT_IN_COPYSIGN)
1777 CASE_MATHFN (BUILT_IN_COS)
1778 CASE_MATHFN (BUILT_IN_COSH)
1779 CASE_MATHFN (BUILT_IN_DREM)
1780 CASE_MATHFN (BUILT_IN_ERF)
1781 CASE_MATHFN (BUILT_IN_ERFC)
1782 CASE_MATHFN (BUILT_IN_EXP)
1783 CASE_MATHFN (BUILT_IN_EXP10)
1784 CASE_MATHFN (BUILT_IN_EXP2)
1785 CASE_MATHFN (BUILT_IN_EXPM1)
1786 CASE_MATHFN (BUILT_IN_FABS)
1787 CASE_MATHFN (BUILT_IN_FDIM)
1788 CASE_MATHFN (BUILT_IN_FLOOR)
1789 CASE_MATHFN (BUILT_IN_FMA)
1790 CASE_MATHFN (BUILT_IN_FMAX)
1791 CASE_MATHFN (BUILT_IN_FMIN)
1792 CASE_MATHFN (BUILT_IN_FMOD)
1793 CASE_MATHFN (BUILT_IN_FREXP)
1794 CASE_MATHFN (BUILT_IN_GAMMA)
1795 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1796 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1797 CASE_MATHFN (BUILT_IN_HYPOT)
1798 CASE_MATHFN (BUILT_IN_ILOGB)
1799 CASE_MATHFN (BUILT_IN_INF)
1800 CASE_MATHFN (BUILT_IN_ISINF)
1801 CASE_MATHFN (BUILT_IN_J0)
1802 CASE_MATHFN (BUILT_IN_J1)
1803 CASE_MATHFN (BUILT_IN_JN)
1804 CASE_MATHFN (BUILT_IN_LCEIL)
1805 CASE_MATHFN (BUILT_IN_LDEXP)
1806 CASE_MATHFN (BUILT_IN_LFLOOR)
1807 CASE_MATHFN (BUILT_IN_LGAMMA)
1808 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1809 CASE_MATHFN (BUILT_IN_LLCEIL)
1810 CASE_MATHFN (BUILT_IN_LLFLOOR)
1811 CASE_MATHFN (BUILT_IN_LLRINT)
1812 CASE_MATHFN (BUILT_IN_LLROUND)
1813 CASE_MATHFN (BUILT_IN_LOG)
1814 CASE_MATHFN (BUILT_IN_LOG10)
1815 CASE_MATHFN (BUILT_IN_LOG1P)
1816 CASE_MATHFN (BUILT_IN_LOG2)
1817 CASE_MATHFN (BUILT_IN_LOGB)
1818 CASE_MATHFN (BUILT_IN_LRINT)
1819 CASE_MATHFN (BUILT_IN_LROUND)
1820 CASE_MATHFN (BUILT_IN_MODF)
1821 CASE_MATHFN (BUILT_IN_NAN)
1822 CASE_MATHFN (BUILT_IN_NANS)
1823 CASE_MATHFN (BUILT_IN_NEARBYINT)
1824 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1825 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1826 CASE_MATHFN (BUILT_IN_POW)
1827 CASE_MATHFN (BUILT_IN_POWI)
1828 CASE_MATHFN (BUILT_IN_POW10)
1829 CASE_MATHFN (BUILT_IN_REMAINDER)
1830 CASE_MATHFN (BUILT_IN_REMQUO)
1831 CASE_MATHFN (BUILT_IN_RINT)
1832 CASE_MATHFN (BUILT_IN_ROUND)
1833 CASE_MATHFN (BUILT_IN_SCALB)
1834 CASE_MATHFN (BUILT_IN_SCALBLN)
1835 CASE_MATHFN (BUILT_IN_SCALBN)
1836 CASE_MATHFN (BUILT_IN_SIGNBIT)
1837 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1838 CASE_MATHFN (BUILT_IN_SIN)
1839 CASE_MATHFN (BUILT_IN_SINCOS)
1840 CASE_MATHFN (BUILT_IN_SINH)
1841 CASE_MATHFN (BUILT_IN_SQRT)
1842 CASE_MATHFN (BUILT_IN_TAN)
1843 CASE_MATHFN (BUILT_IN_TANH)
1844 CASE_MATHFN (BUILT_IN_TGAMMA)
1845 CASE_MATHFN (BUILT_IN_TRUNC)
1846 CASE_MATHFN (BUILT_IN_Y0)
1847 CASE_MATHFN (BUILT_IN_Y1)
1848 CASE_MATHFN (BUILT_IN_YN)
/* Pick the variant matching TYPE's main variant; any other type
   (e.g. __float128 here) yields no conversion.  */
1854 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1855 return fn_arr[fcode];
1856 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1857 return fn_arr[fcodef];
1858 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1859 return fn_arr[fcodel];
1864 /* Like mathfn_built_in_1(), but always use the implicit array. */
1867 mathfn_built_in (tree type, enum built_in_function fn)
1869 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1872 /* If errno must be maintained, expand the RTL to check if the result,
1873 TARGET, of a built-in function call, EXP, is NaN, and if so set
1877 expand_errno_check (tree exp, rtx target)
1879 rtx lab = gen_label_rtx ();
1881 /* Test the result; if it is NaN, set errno=EDOM because
1882 the argument was not in the domain. */
/* TARGET == TARGET is false only for NaN, so the branch to LAB (taken
   for non-NaN results) skips the errno handling.  */
1883 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1884 NULL_RTX, NULL_RTX, lab,
1885 /* The jump is very likely. */
1886 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1889 /* If this built-in doesn't throw an exception, set errno directly. */
1890 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
/* Targets may provide their own rtx for errno via GEN_ERRNO_RTX;
   otherwise reference the global symbol "errno" directly.  */
1892 #ifdef GEN_ERRNO_RTX
1893 rtx errno_rtx = GEN_ERRNO_RTX;
1896 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1898 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1904 /* Make sure the library call isn't expanded as a tail call. */
1905 CALL_EXPR_TAILCALL (exp) = 0;
1907 /* We can't set errno=EDOM directly; let the library call do it.
1908 Pop the arguments right away in case the call gets deleted. */
1910 expand_call (exp, target, 0);
1915 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1916 Return NULL_RTX if a normal call should be emitted rather than expanding
1917 the function in-line. EXP is the expression that is a call to the builtin
1918 function; if convenient, the result should be placed in TARGET.
1919 SUBTARGET may be used as the target for computing one of EXP's operands. */
1922 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1924 optab builtin_optab;
1926 tree fndecl = get_callee_fndecl (exp);
1927 enum machine_mode mode;
1928 bool errno_set = false;
/* Expect exactly one real-typed argument; otherwise emit a normal call.  */
1931 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1934 arg = CALL_EXPR_ARG (exp, 0);
/* Select the optab for this builtin and note whether it can set errno.  */
1936 switch (DECL_FUNCTION_CODE (fndecl))
1938 CASE_FLT_FN (BUILT_IN_SQRT):
/* sqrt sets errno only for negative arguments.  */
1939 errno_set = ! tree_expr_nonnegative_p (arg);
1940 builtin_optab = sqrt_optab;
1942 CASE_FLT_FN (BUILT_IN_EXP):
1943 errno_set = true; builtin_optab = exp_optab; break;
1944 CASE_FLT_FN (BUILT_IN_EXP10):
1945 CASE_FLT_FN (BUILT_IN_POW10):
1946 errno_set = true; builtin_optab = exp10_optab; break;
1947 CASE_FLT_FN (BUILT_IN_EXP2):
1948 errno_set = true; builtin_optab = exp2_optab; break;
1949 CASE_FLT_FN (BUILT_IN_EXPM1):
1950 errno_set = true; builtin_optab = expm1_optab; break;
1951 CASE_FLT_FN (BUILT_IN_LOGB):
1952 errno_set = true; builtin_optab = logb_optab; break;
1953 CASE_FLT_FN (BUILT_IN_LOG):
1954 errno_set = true; builtin_optab = log_optab; break;
1955 CASE_FLT_FN (BUILT_IN_LOG10):
1956 errno_set = true; builtin_optab = log10_optab; break;
1957 CASE_FLT_FN (BUILT_IN_LOG2):
1958 errno_set = true; builtin_optab = log2_optab; break;
1959 CASE_FLT_FN (BUILT_IN_LOG1P):
1960 errno_set = true; builtin_optab = log1p_optab; break;
1961 CASE_FLT_FN (BUILT_IN_ASIN):
1962 builtin_optab = asin_optab; break;
1963 CASE_FLT_FN (BUILT_IN_ACOS):
1964 builtin_optab = acos_optab; break;
1965 CASE_FLT_FN (BUILT_IN_TAN):
1966 builtin_optab = tan_optab; break;
1967 CASE_FLT_FN (BUILT_IN_ATAN):
1968 builtin_optab = atan_optab; break;
1969 CASE_FLT_FN (BUILT_IN_FLOOR):
1970 builtin_optab = floor_optab; break;
1971 CASE_FLT_FN (BUILT_IN_CEIL):
1972 builtin_optab = ceil_optab; break;
1973 CASE_FLT_FN (BUILT_IN_TRUNC):
1974 builtin_optab = btrunc_optab; break;
1975 CASE_FLT_FN (BUILT_IN_ROUND):
1976 builtin_optab = round_optab; break;
1977 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1978 builtin_optab = nearbyint_optab;
/* With -ftrapping-math, nearbyint must not be replaced by rint,
   which may raise the inexact exception.  */
1979 if (flag_trapping_math)
1981 /* Else fallthrough and expand as rint. */
1982 CASE_FLT_FN (BUILT_IN_RINT):
1983 builtin_optab = rint_optab; break;
1984 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1985 builtin_optab = significand_optab; break;
1990 /* Make a suitable register to place result in. */
1991 mode = TYPE_MODE (TREE_TYPE (exp));
/* errno handling is only needed when -fmath-errno is in effect and the
   mode honors NaNs (the NaN compare below is the errno trigger).  */
1993 if (! flag_errno_math || ! HONOR_NANS (mode))
1996 /* Before working hard, check whether the instruction is available. */
1997 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1999 target = gen_reg_rtx (mode);
2001 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2002 need to expand the argument again. This way, we will not perform
2003 side-effects more the once. */
2004 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2006 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2010 /* Compute into TARGET.
2011 Set TARGET to wherever the result comes back. */
2012 target = expand_unop (mode, builtin_optab, op0, target, 0);
/* If the result can be NaN, emit the errno=EDOM check on it.  */
2017 expand_errno_check (exp, target);
2019 /* Output the entire sequence. */
2020 insns = get_insns ();
2026 /* If we were unable to expand via the builtin, stop the sequence
2027 (without outputting the insns) and call to the library function
2028 with the stabilized argument list. */
2032 return expand_call (exp, target, target == const0_rtx);
2035 /* Expand a call to the builtin binary math functions (pow and atan2).
2036 Return NULL_RTX if a normal call should be emitted rather than expanding the
2037 function in-line. EXP is the expression that is a call to the builtin
2038 function; if convenient, the result should be placed in TARGET.
2039 SUBTARGET may be used as the target for computing one of EXP's
2043 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2045 optab builtin_optab;
2046 rtx op0, op1, insns;
2047 int op1_type = REAL_TYPE;
2048 tree fndecl = get_callee_fndecl (exp);
2050 enum machine_mode mode;
2051 bool errno_set = true;
/* ldexp, scalbn and scalbln take an integer second argument; the other
   two-argument builtins handled here take two reals.  */
2053 switch (DECL_FUNCTION_CODE (fndecl))
2055 CASE_FLT_FN (BUILT_IN_SCALBN):
2056 CASE_FLT_FN (BUILT_IN_SCALBLN):
2057 CASE_FLT_FN (BUILT_IN_LDEXP):
2058 op1_type = INTEGER_TYPE;
2063 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2066 arg0 = CALL_EXPR_ARG (exp, 0);
2067 arg1 = CALL_EXPR_ARG (exp, 1);
/* Select the optab for this builtin.  scalb/scalbn/scalbln are only
   expanded when the float format has radix 2 (then scalbn/scalbln are
   equivalent to ldexp and share its optab).  */
2069 switch (DECL_FUNCTION_CODE (fndecl))
2071 CASE_FLT_FN (BUILT_IN_POW):
2072 builtin_optab = pow_optab; break;
2073 CASE_FLT_FN (BUILT_IN_ATAN2):
2074 builtin_optab = atan2_optab; break;
2075 CASE_FLT_FN (BUILT_IN_SCALB):
2076 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2078 builtin_optab = scalb_optab; break;
2079 CASE_FLT_FN (BUILT_IN_SCALBN):
2080 CASE_FLT_FN (BUILT_IN_SCALBLN):
2081 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2083 /* Fall through... */
2084 CASE_FLT_FN (BUILT_IN_LDEXP):
2085 builtin_optab = ldexp_optab; break;
2086 CASE_FLT_FN (BUILT_IN_FMOD):
2087 builtin_optab = fmod_optab; break;
2088 CASE_FLT_FN (BUILT_IN_REMAINDER):
2089 CASE_FLT_FN (BUILT_IN_DREM):
2090 builtin_optab = remainder_optab; break;
2095 /* Make a suitable register to place result in. */
2096 mode = TYPE_MODE (TREE_TYPE (exp));
2098 /* Before working hard, check whether the instruction is available. */
2099 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2102 target = gen_reg_rtx (mode);
/* errno checking can be skipped under -fno-math-errno, or when this
   mode does not honor NaNs (expand_errno_check below is then unneeded).  */
2104 if (! flag_errno_math || ! HONOR_NANS (mode))
2107 /* Always stabilize the argument list. */
2108 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2109 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2111 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2112 op1 = expand_normal (arg1);
2116 /* Compute into TARGET.
2117 Set TARGET to wherever the result comes back. */
2118 target = expand_binop (mode, builtin_optab, op0, op1,
2119 target, 0, OPTAB_DIRECT);
2121 /* If we were unable to expand via the builtin, stop the sequence
2122 (without outputting the insns) and call to the library function
2123 with the stabilized argument list. */
2127 return expand_call (exp, target, target == const0_rtx);
2131 expand_errno_check (exp, target);
2133 /* Output the entire sequence. */
2134 insns = get_insns ();
2141 /* Expand a call to the builtin sin and cos math functions.
2142 Return NULL_RTX if a normal call should be emitted rather than expanding the
2143 function in-line. EXP is the expression that is a call to the builtin
2144 function; if convenient, the result should be placed in TARGET.
2145 SUBTARGET may be used as the target for computing one of EXP's
2149 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2151 optab builtin_optab;
2153 tree fndecl = get_callee_fndecl (exp);
2154 enum machine_mode mode;
2157 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2160 arg = CALL_EXPR_ARG (exp, 0);
/* Both sin and cos first try the combined sincos optab.  */
2162 switch (DECL_FUNCTION_CODE (fndecl))
2164 CASE_FLT_FN (BUILT_IN_SIN):
2165 CASE_FLT_FN (BUILT_IN_COS):
2166 builtin_optab = sincos_optab; break;
2171 /* Make a suitable register to place result in. */
2172 mode = TYPE_MODE (TREE_TYPE (exp));
2174 /* Check if sincos insn is available, otherwise fallback
2175 to sin or cos insn. */
2176 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2177 switch (DECL_FUNCTION_CODE (fndecl))
2179 CASE_FLT_FN (BUILT_IN_SIN):
2180 builtin_optab = sin_optab; break;
2181 CASE_FLT_FN (BUILT_IN_COS):
2182 builtin_optab = cos_optab; break;
2187 /* Before working hard, check whether the instruction is available. */
2188 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2190 target = gen_reg_rtx (mode);
2192 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2193 need to expand the argument again. This way, we will not perform
2194 side-effects more than once. */
2195 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2197 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2201 /* Compute into TARGET.
2202 Set TARGET to wherever the result comes back. */
/* The sincos insn produces two values; request only the one we need by
   passing 0 for the other output slot (TARGET goes in the second slot
   for sin, the first for cos).  */
2203 if (builtin_optab == sincos_optab)
2207 switch (DECL_FUNCTION_CODE (fndecl))
2209 CASE_FLT_FN (BUILT_IN_SIN):
2210 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2212 CASE_FLT_FN (BUILT_IN_COS):
2213 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2218 gcc_assert (result);
2222 target = expand_unop (mode, builtin_optab, op0, target, 0);
2227 /* Output the entire sequence. */
2228 insns = get_insns ();
2234 /* If we were unable to expand via the builtin, stop the sequence
2235 (without outputting the insns) and call to the library function
2236 with the stabilized argument list. */
2240 target = expand_call (exp, target, target == const0_rtx);
2245 /* Given an interclass math builtin decl FNDECL and its argument ARG
2246 return an RTL instruction code that implements the functionality.
2247 If that isn't possible or available return CODE_FOR_nothing. */
2249 static enum insn_code
2250 interclass_mathfn_icode (tree arg, tree fndecl)
2252 bool errno_set = false;
2253 optab builtin_optab = 0;
2254 enum machine_mode mode;
/* Map the builtin to its optab; only ilogb can set errno (EDOM).  */
2256 switch (DECL_FUNCTION_CODE (fndecl))
2258 CASE_FLT_FN (BUILT_IN_ILOGB):
2259 errno_set = true; builtin_optab = ilogb_optab; break;
2260 CASE_FLT_FN (BUILT_IN_ISINF):
2261 builtin_optab = isinf_optab; break;
2262 case BUILT_IN_ISNORMAL:
2263 case BUILT_IN_ISFINITE:
2264 CASE_FLT_FN (BUILT_IN_FINITE):
2265 case BUILT_IN_FINITED32:
2266 case BUILT_IN_FINITED64:
2267 case BUILT_IN_FINITED128:
2268 case BUILT_IN_ISINFD32:
2269 case BUILT_IN_ISINFD64:
2270 case BUILT_IN_ISINFD128:
2271 /* These builtins have no optabs (yet). */
2277 /* There's no easy way to detect the case we need to set EDOM. */
2278 if (flag_errno_math && errno_set)
2279 return CODE_FOR_nothing;
2281 /* Optab mode depends on the mode of the input argument. */
2282 mode = TYPE_MODE (TREE_TYPE (arg));
2285 return optab_handler (builtin_optab, mode)->insn_code;
2286 return CODE_FOR_nothing;
2289 /* Expand a call to one of the builtin math functions that operate on
2290 floating point argument and output an integer result (ilogb, isinf,
2292 Return 0 if a normal call should be emitted rather than expanding the
2293 function in-line. EXP is the expression that is a call to the builtin
2294 function; if convenient, the result should be placed in TARGET.
2295 SUBTARGET may be used as the target for computing one of EXP's operands. */
2298 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2300 enum insn_code icode = CODE_FOR_nothing;
2302 tree fndecl = get_callee_fndecl (exp);
2303 enum machine_mode mode;
2306 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2309 arg = CALL_EXPR_ARG (exp, 0);
2310 icode = interclass_mathfn_icode (arg, fndecl);
2311 mode = TYPE_MODE (TREE_TYPE (arg));
2313 if (icode != CODE_FOR_nothing)
2315 rtx last = get_last_insn ();
2316 tree orig_arg = arg;
2317 /* Make a suitable register to place result in. */
/* TARGET must have the call's result mode and satisfy the insn's
   output-operand predicate; otherwise allocate a fresh pseudo.  */
2319 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
2320 || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
2321 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2323 gcc_assert (insn_data[icode].operand[0].predicate
2324 (target, GET_MODE (target)));
2326 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2327 need to expand the argument again. This way, we will not perform
2328 side-effects more than once. */
2329 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2331 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2333 if (mode != GET_MODE (op0))
2334 op0 = convert_to_mode (mode, op0, 0);
2336 /* Compute into TARGET.
2337 Set TARGET to wherever the result comes back. */
2338 if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
/* Emission failed: roll back any insns emitted since LAST and restore
   the original (unsaved) argument before falling back to a call.  */
2340 delete_insns_since (last);
2341 CALL_EXPR_ARG (exp, 0) = orig_arg;
2347 /* Expand a call to the builtin sincos math function.
2348 Return NULL_RTX if a normal call should be emitted rather than expanding the
2349 function in-line. EXP is the expression that is a call to the builtin
2353 expand_builtin_sincos (tree exp)
2355 rtx op0, op1, op2, target1, target2;
2356 enum machine_mode mode;
2357 tree arg, sinp, cosp;
2359 location_t loc = EXPR_LOCATION (exp);
/* sincos (x, *sinp, *cosp): one real input, two pointer outputs.  */
2361 if (!validate_arglist (exp, REAL_TYPE,
2362 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2365 arg = CALL_EXPR_ARG (exp, 0);
2366 sinp = CALL_EXPR_ARG (exp, 1);
2367 cosp = CALL_EXPR_ARG (exp, 2);
2369 /* Make a suitable register to place result in. */
2370 mode = TYPE_MODE (TREE_TYPE (arg));
2372 /* Check if sincos insn is available, otherwise emit the call. */
2373 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2376 target1 = gen_reg_rtx (mode);
2377 target2 = gen_reg_rtx (mode);
2379 op0 = expand_normal (arg);
/* op1 and op2 are the dereferenced output locations *sinp and *cosp.  */
2380 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2381 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2383 /* Compute into target1 and target2.
2384 Set TARGET to wherever the result comes back. */
/* target1 receives the sin value, target2 the cos value; they are
   stored through sinp and cosp below.  */
2385 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2386 gcc_assert (result);
2388 /* Move target1 and target2 to the memory locations indicated
2390 emit_move_insn (op1, target1);
2391 emit_move_insn (op2, target2);
2396 /* Expand a call to the internal cexpi builtin to the sincos math function.
2397 EXP is the expression that is a call to the builtin function; if convenient,
2398 the result should be placed in TARGET. SUBTARGET may be used as the target
2399 for computing one of EXP's operands. */
2402 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2404 tree fndecl = get_callee_fndecl (exp);
2406 enum machine_mode mode;
2408 location_t loc = EXPR_LOCATION (exp);
2410 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2413 arg = CALL_EXPR_ARG (exp, 0);
2414 type = TREE_TYPE (arg);
2415 mode = TYPE_MODE (TREE_TYPE (arg));
2417 /* Try expanding via a sincos optab, fall back to emitting a libcall
2418 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2419 is only generated from sincos, cexp or if we have either of them. */
2420 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2422 op1 = gen_reg_rtx (mode);
2423 op2 = gen_reg_rtx (mode);
2425 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2427 /* Compute into op1 and op2. */
2428 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
/* Second strategy: call sincos(f) with two stack temporaries and read
   the results back (the COMPLEX_EXPR at the end combines them).  */
2430 else if (TARGET_HAS_SINCOS)
2432 tree call, fn = NULL_TREE;
2436 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2437 fn = built_in_decls[BUILT_IN_SINCOSF];
2438 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2439 fn = built_in_decls[BUILT_IN_SINCOS];
2440 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2441 fn = built_in_decls[BUILT_IN_SINCOSL];
2445 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2446 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
/* Take the addresses of the two temporaries and wrap them as trees so
   they can be passed as the sinp/cosp arguments of sincos.  */
2447 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2448 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2449 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2450 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2452 /* Make sure not to fold the sincos call again. */
2453 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2454 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2455 call, 3, arg, top1, top2));
/* Last resort: expand cexpi (x) as cexp (0 + x*i) via a cexp libcall.  */
2459 tree call, fn = NULL_TREE, narg;
2460 tree ctype = build_complex_type (type);
2462 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2463 fn = built_in_decls[BUILT_IN_CEXPF];
2464 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2465 fn = built_in_decls[BUILT_IN_CEXP];
2466 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2467 fn = built_in_decls[BUILT_IN_CEXPL];
2471 /* If we don't have a decl for cexp create one. This is the
2472 friendliest fallback if the user calls __builtin_cexpi
2473 without full target C99 function support. */
2474 if (fn == NULL_TREE)
2477 const char *name = NULL;
2479 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2481 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2483 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2486 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2487 fn = build_fn_decl (name, fntype);
/* Build the complex argument 0 + arg*i for cexp.  */
2490 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2491 build_real (type, dconst0), arg);
2493 /* Make sure not to fold the cexp call again. */
2494 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2495 return expand_expr (build_call_nary (ctype, call, 1, narg),
2496 target, VOIDmode, EXPAND_NORMAL);
2499 /* Now build the proper return type. */
/* cexpi returns cos(x) + i*sin(x): op2 is the real part (cos), op1 the
   imaginary part (sin), matching expand_builtin_sincos above.  */
2500 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2501 make_tree (TREE_TYPE (arg), op2),
2502 make_tree (TREE_TYPE (arg), op1)),
2503 target, VOIDmode, EXPAND_NORMAL);
2506 /* Conveniently construct a function call expression. FNDECL names the
2507 function to be called, N is the number of arguments, and the "..."
2508 parameters are the argument expressions. Unlike build_call_expr
2509 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2512 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2515 tree fntype = TREE_TYPE (fndecl);
/* Build the callee address explicitly so the call is not folded.  */
2516 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2519 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2521 SET_EXPR_LOCATION (fn, loc);
2525 /* Expand a call to one of the builtin rounding functions gcc defines
2526 as an extension (lfloor and lceil). As these are gcc extensions we
2527 do not need to worry about setting errno to EDOM.
2528 If expanding via optab fails, lower expression to (int)(floor(x)).
2529 EXP is the expression that is a call to the builtin function;
2530 if convenient, the result should be placed in TARGET. */
2533 expand_builtin_int_roundingfn (tree exp, rtx target)
2535 convert_optab builtin_optab;
2536 rtx op0, insns, tmp;
2537 tree fndecl = get_callee_fndecl (exp);
2538 enum built_in_function fallback_fn;
2539 tree fallback_fndecl;
2540 enum machine_mode mode;
2543 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2546 arg = CALL_EXPR_ARG (exp, 0);
/* Pick the conversion optab and the float rounding builtin used as a
   fallback when the optab is unavailable.  */
2548 switch (DECL_FUNCTION_CODE (fndecl))
2550 CASE_FLT_FN (BUILT_IN_LCEIL):
2551 CASE_FLT_FN (BUILT_IN_LLCEIL):
2552 builtin_optab = lceil_optab;
2553 fallback_fn = BUILT_IN_CEIL;
2556 CASE_FLT_FN (BUILT_IN_LFLOOR):
2557 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2558 builtin_optab = lfloor_optab;
2559 fallback_fn = BUILT_IN_FLOOR;
2566 /* Make a suitable register to place result in. */
2567 mode = TYPE_MODE (TREE_TYPE (exp));
2569 target = gen_reg_rtx (mode);
2571 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2572 need to expand the argument again. This way, we will not perform
2573 side-effects more than once. */
2574 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2576 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2580 /* Compute into TARGET. */
2581 if (expand_sfix_optab (target, op0, builtin_optab))
2583 /* Output the entire sequence. */
2584 insns = get_insns ();
2590 /* If we were unable to expand via the builtin, stop the sequence
2591 (without outputting the insns). */
2594 /* Fall back to floating point rounding optab. */
2595 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2597 /* For non-C99 targets we may end up without a fallback fndecl here
2598 if the user called __builtin_lfloor directly. In this case emit
2599 a call to the floor/ceil variants nevertheless. This should result
2600 in the best user experience for not full C99 targets. */
2601 if (fallback_fndecl == NULL_TREE)
2604 const char *name = NULL;
/* Choose the libm function name matching the builtin's float type.  */
2606 switch (DECL_FUNCTION_CODE (fndecl))
2608 case BUILT_IN_LCEIL:
2609 case BUILT_IN_LLCEIL:
2612 case BUILT_IN_LCEILF:
2613 case BUILT_IN_LLCEILF:
2616 case BUILT_IN_LCEILL:
2617 case BUILT_IN_LLCEILL:
2620 case BUILT_IN_LFLOOR:
2621 case BUILT_IN_LLFLOOR:
2624 case BUILT_IN_LFLOORF:
2625 case BUILT_IN_LLFLOORF:
2628 case BUILT_IN_LFLOORL:
2629 case BUILT_IN_LLFLOORL:
2636 fntype = build_function_type_list (TREE_TYPE (arg),
2637 TREE_TYPE (arg), NULL_TREE);
2638 fallback_fndecl = build_fn_decl (name, fntype);
/* Expand the call to floor/ceil, then convert its float result to the
   integer result type with expand_fix.  */
2641 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2643 tmp = expand_normal (exp);
2645 /* Truncate the result of floating point optab to integer
2646 via expand_fix (). */
2647 target = gen_reg_rtx (mode);
2648 expand_fix (target, tmp, 0);
2653 /* Expand a call to one of the builtin math functions doing integer
2655 Return 0 if a normal call should be emitted rather than expanding the
2656 function in-line. EXP is the expression that is a call to the builtin
2657 function; if convenient, the result should be placed in TARGET. */
2660 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2662 convert_optab builtin_optab;
2664 tree fndecl = get_callee_fndecl (exp);
2666 enum machine_mode mode;
2668 /* There's no easy way to detect the case we need to set EDOM. */
2669 if (flag_errno_math)
2672 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2675 arg = CALL_EXPR_ARG (exp, 0);
/* lrint/llrint and lround/llround share conversion optabs.  */
2677 switch (DECL_FUNCTION_CODE (fndecl))
2679 CASE_FLT_FN (BUILT_IN_LRINT):
2680 CASE_FLT_FN (BUILT_IN_LLRINT):
2681 builtin_optab = lrint_optab; break;
2682 CASE_FLT_FN (BUILT_IN_LROUND):
2683 CASE_FLT_FN (BUILT_IN_LLROUND):
2684 builtin_optab = lround_optab; break;
2689 /* Make a suitable register to place result in. */
2690 mode = TYPE_MODE (TREE_TYPE (exp));
2692 target = gen_reg_rtx (mode);
2694 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2695 need to expand the argument again. This way, we will not perform
2696 side-effects more than once. */
2697 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2699 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2703 if (expand_sfix_optab (target, op0, builtin_optab))
2705 /* Output the entire sequence. */
2706 insns = get_insns ();
2712 /* If we were unable to expand via the builtin, stop the sequence
2713 (without outputting the insns) and call to the library function
2714 with the stabilized argument list. */
2717 target = expand_call (exp, target, target == const0_rtx);
2722 /* To evaluate powi(x,n), the floating point value x raised to the
2723 constant integer exponent n, we use a hybrid algorithm that
2724 combines the "window method" with look-up tables. For an
2725 introduction to exponentiation algorithms and "addition chains",
2726 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2727 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2728 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2729 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2731 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2732 multiplications to inline before calling the system library's pow
2733 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2734 so this default never requires calling pow, powf or powl. */
2736 #ifndef POWI_MAX_MULTS
2737 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2740 /* The size of the "optimal power tree" lookup table. All
2741 exponents less than this value are simply looked up in the
2742 powi_table below. This threshold is also used to size the
2743 cache of pseudo registers that hold intermediate results. */
2744 #define POWI_TABLE_SIZE 256
2746 /* The size, in bits of the window, used in the "window method"
2747 exponentiation algorithm. This is equivalent to a radix of
2748 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2749 #define POWI_WINDOW_SIZE 3
2751 /* The following table is an efficient representation of an
2752 "optimal power tree". For each value, i, the corresponding
2753 value, j, in the table states that an optimal evaluation
2754 sequence for calculating pow(x,i) can be found by evaluating
2755 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2756 100 integers is given in Knuth's "Seminumerical algorithms". */
2758 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2760 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2761 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2762 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2763 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2764 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2765 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2766 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2767 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2768 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2769 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2770 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2771 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2772 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2773 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2774 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2775 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2776 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2777 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2778 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2779 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2780 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2781 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2782 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2783 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2784 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2785 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2786 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2787 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2788 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2789 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2790 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2791 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2795 /* Return the number of multiplications required to calculate
2796 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2797 subroutine of powi_cost. CACHE is an array indicating
2798 which exponents have already been calculated. */
2801 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2803 /* If we've already calculated this exponent, then this evaluation
2804 doesn't require any additional multiplications. */
/* Recurse on the optimal split recorded in powi_table; the +1 counts
   the multiplication that combines the two sub-results.  */
2809 return powi_lookup_cost (n - powi_table[n], cache)
2810 + powi_lookup_cost (powi_table[n], cache) + 1;
2813 /* Return the number of multiplications required to calculate
2814 powi(x,n) for an arbitrary x, given the exponent N. This
2815 function needs to be kept in sync with expand_powi below. */
2818 powi_cost (HOST_WIDE_INT n)
2820 bool cache[POWI_TABLE_SIZE];
2821 unsigned HOST_WIDE_INT digit;
2822 unsigned HOST_WIDE_INT val;
2828 /* Ignore the reciprocal when calculating the cost. */
2829 val = (n < 0) ? -n : n;
2831 /* Initialize the exponent cache. */
2832 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
/* Window method: peel off POWI_WINDOW_SIZE bits at a time until the
   remaining exponent fits in the lookup table.  */
2837 while (val >= POWI_TABLE_SIZE)
2841 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2842 result += powi_lookup_cost (digit, cache)
2843 + POWI_WINDOW_SIZE + 1;
2844 val >>= POWI_WINDOW_SIZE;
2853 return result + powi_lookup_cost (val, cache);
2856 /* Recursive subroutine of expand_powi. This function takes the array,
2857 CACHE, of already calculated exponents and an exponent N and returns
2858 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2861 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2863 unsigned HOST_WIDE_INT digit;
/* Small exponents use the optimal split from powi_table (and memoize
   results in CACHE).  */
2867 if (n < POWI_TABLE_SIZE)
2872 target = gen_reg_rtx (mode);
2875 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2876 op1 = expand_powi_1 (mode, powi_table[n], cache);
/* Window method for odd large exponents: split off the low
   POWI_WINDOW_SIZE bits.  */
2880 target = gen_reg_rtx (mode);
2881 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2882 op0 = expand_powi_1 (mode, n - digit, cache);
2883 op1 = expand_powi_1 (mode, digit, cache);
/* Even large exponents: square the result for n/2.  */
2887 target = gen_reg_rtx (mode);
2888 op0 = expand_powi_1 (mode, n >> 1, cache);
2892 result = expand_mult (mode, op0, op1, target, 0);
2893 if (result != target)
2894 emit_move_insn (target, result);
2898 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2899 floating point operand in mode MODE, and N is the exponent. This
2900 function needs to be kept in sync with powi_cost above. */
2903 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2905 rtx cache[POWI_TABLE_SIZE];
/* powi(x,0) is 1.0 regardless of x.  */
2909 return CONST1_RTX (mode);
2911 memset (cache, 0, sizeof (cache));
/* Compute x**|n|, then reciprocate for negative exponents.  */
2914 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2916 /* If the original exponent was negative, reciprocate the result. */
2918 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2919 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2924 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
2925 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
2926 if we can simplify it. */
2928 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
/* Only a literal, non-overflowed exponent under -funsafe-math-optimizations
   is considered.  */
2931 if (TREE_CODE (arg1) == REAL_CST
2932 && !TREE_OVERFLOW (arg1)
2933 && flag_unsafe_math_optimizations)
2935 enum machine_mode mode = TYPE_MODE (type);
2936 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
2937 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
2938 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
2939 tree op = NULL_TREE;
2943 /* Optimize pow (x, 0.5) into sqrt. */
2944 if (REAL_VALUES_EQUAL (c, dconsthalf))
2945 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
/* Build the constants 0.25 and 0.75 by exponent manipulation
   (1/2^2 and 3/2^2 respectively).  */
2949 REAL_VALUE_TYPE dconst1_4 = dconst1;
2950 REAL_VALUE_TYPE dconst3_4;
2951 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
2953 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
2954 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
2956 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
2957 machines that a builtin sqrt instruction is smaller than a
2958 call to pow with 0.25, so do this optimization even if
2960 if (REAL_VALUES_EQUAL (c, dconst1_4))
2962 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
2963 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
2966 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
2967 are optimizing for space. */
2968 else if (optimize_insn_for_speed_p ()
2969 && !TREE_SIDE_EFFECTS (arg0)
2970 && REAL_VALUES_EQUAL (c, dconst3_4))
2972 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
2973 tree sqrt2 = builtin_save_expr (sqrt1);
2974 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
2975 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
2980 /* Check whether we can do cbrt instead of pow (x, 1./3.) and
2981 cbrt/sqrts instead of pow (x, 1./6.). */
/* cbrt of negative x differs from pow (x, 1./3.), so require a
   nonnegative argument or no NaNs in this mode.  */
2983 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
2985 /* First try 1/3. */
2986 REAL_VALUE_TYPE dconst1_3
2987 = real_value_truncate (mode, dconst_third ());
2989 if (REAL_VALUES_EQUAL (c, dconst1_3))
2990 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
2993 else if (optimize_insn_for_speed_p ())
2995 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
2996 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
2998 if (REAL_VALUES_EQUAL (c, dconst1_6))
/* pow (x, 1./6.) == cbrt (sqrt (x)).  */
3000 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3001 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3007 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
3013 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
3014 a normal call should be emitted rather than expanding the function
3015 in-line. EXP is the expression that is a call to the builtin
3016 function; if convenient, the result should be placed in TARGET. */
3019 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
3023 tree type = TREE_TYPE (exp);
3024 REAL_VALUE_TYPE cint, c, c2;
3027 enum machine_mode mode = TYPE_MODE (type);
3029 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
3032 arg0 = CALL_EXPR_ARG (exp, 0);
3033 arg1 = CALL_EXPR_ARG (exp, 1);
/* Non-constant exponent: defer to the generic two-operand expander.  */
3035 if (TREE_CODE (arg1) != REAL_CST
3036 || TREE_OVERFLOW (arg1))
3037 return expand_builtin_mathfn_2 (exp, target, subtarget);
3039 /* Handle constant exponents. */
3041 /* For integer valued exponents we can expand to an optimal multiplication
3042 sequence using expand_powi. */
3043 c = TREE_REAL_CST (arg1);
3044 n = real_to_integer (&c);
3045 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
/* Exponents -1, 0, 1, 2 are always worth inlining; larger ones only
   under unsafe math, when optimizing for speed and within the
   POWI_MAX_MULTS multiplication budget.  */
3046 if (real_identical (&c, &cint)
3047 && ((n >= -1 && n <= 2)
3048 || (flag_unsafe_math_optimizations
3049 && optimize_insn_for_speed_p ()
3050 && powi_cost (n) <= POWI_MAX_MULTS)))
3052 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3055 op = force_reg (mode, op);
3056 op = expand_powi (op, mode, n);
/* Stabilize arg0; it is expanded again on the sqrt/cbrt paths below.  */
3061 narg0 = builtin_save_expr (arg0);
3063 /* If the exponent is not integer valued, check if it is half of an integer.
3064 In this case we can expand to sqrt (x) * x**(n/2). */
3065 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3066 if (fn != NULL_TREE)
/* c2 = 2*c; if c2 is an integer N then c == N/2.  */
3068 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3069 n = real_to_integer (&c2);
3070 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3071 if (real_identical (&c2, &cint)
3072 && ((flag_unsafe_math_optimizations
3073 && optimize_insn_for_speed_p ()
3074 && powi_cost (n/2) <= POWI_MAX_MULTS)
3075 /* Even the c == 0.5 case cannot be done unconditionally
3076 when we need to preserve signed zeros, as
3077 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
3078 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
3079 /* For c == 1.5 we can assume that x * sqrt (x) is always
3080 smaller than pow (x, 1.5) if sqrt will not be expanded
3083 && (optab_handler (sqrt_optab, mode)->insn_code
3084 != CODE_FOR_nothing))))
3086 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3088 /* Use expand_expr in case the newly built call expression
3089 was folded to a non-call. */
3090 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
/* Multiply sqrt (x) by x**(|n|/2) computed via expand_powi.  */
3093 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3094 op2 = force_reg (mode, op2);
3095 op2 = expand_powi (op2, mode, abs (n / 2));
3096 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3097 0, OPTAB_LIB_WIDEN);
3098 /* If the original exponent was negative, reciprocate the
3101 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3102 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3108 /* Check whether we can do a series of sqrt or cbrt's instead of the pow
3110 op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
3115 /* Try if the exponent is a third of an integer. In this case
3116 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3117 different from pow (x, 1./3.) due to rounding and behavior
3118 with negative x we need to constrain this transformation to
3119 unsafe math and positive x or finite math. */
3120 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3122 && flag_unsafe_math_optimizations
3123 && (tree_expr_nonnegative_p (arg0)
3124 || !HONOR_NANS (mode)))
/* Compute n = round (3*c); c is a third of an integer iff n/3
   converts back exactly to c in this mode.  */
3126 REAL_VALUE_TYPE dconst3;
3127 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3128 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3129 real_round (&c2, mode, &c2);
3130 n = real_to_integer (&c2);
3131 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3132 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3133 real_convert (&c2, mode, &c2);
3134 if (real_identical (&c2, &c)
3135 && ((optimize_insn_for_speed_p ()
3136 && powi_cost (n/3) <= POWI_MAX_MULTS)
3139 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3141 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
/* cbrt(x)**2 when |n| mod 3 == 2.  */
3142 if (abs (n) % 3 == 2)
3143 op = expand_simple_binop (mode, MULT, op, op, op,
3144 0, OPTAB_LIB_WIDEN);
/* Multiply by x**(|n|/3) computed via expand_powi.  */
3147 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3148 op2 = force_reg (mode, op2);
3149 op2 = expand_powi (op2, mode, abs (n / 3));
3150 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3151 0, OPTAB_LIB_WIDEN);
3152 /* If the original exponent was negative, reciprocate the
3155 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3156 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3162 /* Fall back to optab expansion. */
3163 return expand_builtin_mathfn_2 (exp, target, subtarget);
3166 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3167 a normal call should be emitted rather than expanding the function
3168 in-line. EXP is the expression that is a call to the builtin
3169 function; if convenient, the result should be placed in TARGET. */
3172 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3176 enum machine_mode mode;
3177 enum machine_mode mode2;
3179 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3182 arg0 = CALL_EXPR_ARG (exp, 0);
3183 arg1 = CALL_EXPR_ARG (exp, 1);
3184 mode = TYPE_MODE (TREE_TYPE (exp));
3186 /* Handle constant power. */
3188 if (TREE_CODE (arg1) == INTEGER_CST
3189 && !TREE_OVERFLOW (arg1))
3191 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3193 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3194 Otherwise, check the number of multiplications required. */
/* The high-word test restricts the exponent to values representable
   in a sign-extended HOST_WIDE_INT, so N above is trustworthy. */
3195 if ((TREE_INT_CST_HIGH (arg1) == 0
3196 || TREE_INT_CST_HIGH (arg1) == -1)
3197 && ((n >= -1 && n <= 2)
3198 || (optimize_insn_for_speed_p ()
3199 && powi_cost (n) <= POWI_MAX_MULTS)))
3201 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3202 op0 = force_reg (mode, op0);
3203 return expand_powi (op0, mode, n);
3207 /* Emit a libcall to libgcc. */
3209 /* Mode of the 2nd argument must match that of an int. */
3210 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3212 if (target == NULL_RTX)
3213 target = gen_reg_rtx (mode);
3215 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3216 if (GET_MODE (op0) != mode)
3217 op0 = convert_to_mode (mode, op0, 0);
3218 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3219 if (GET_MODE (op1) != mode2)
3220 op1 = convert_to_mode (mode2, op1, 0);
/* The libcall is emitted as LCT_CONST: the __powi* routines are treated
   as having no side effects. */
3222 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3223 target, LCT_CONST, mode, 2,
3224 op0, mode, op1, mode2);
3229 /* Expand expression EXP which is a call to the strlen builtin. Return
3230 NULL_RTX if we failed the caller should emit a normal call, otherwise
3231 try to get the result in TARGET, if convenient. */
3234 expand_builtin_strlen (tree exp, rtx target,
3235 enum machine_mode target_mode)
3237 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3243 tree src = CALL_EXPR_ARG (exp, 0);
3244 rtx result, src_reg, char_rtx, before_strlen;
3245 enum machine_mode insn_mode = target_mode, char_mode;
3246 enum insn_code icode = CODE_FOR_nothing;
3249 /* If the length can be computed at compile-time, return it. */
3250 len = c_strlen (src, 0);
3252 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3254 /* If the length can be computed at compile-time and is constant
3255 integer, but there are side-effects in src, evaluate
3256 src for side-effects, then return len.
3257 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3258 can be optimized into: i++; x = 3; */
3259 len = c_strlen (src, 1);
3260 if (len && TREE_CODE (len) == INTEGER_CST)
3262 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3263 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
/* ALIGN is in bytes; it is passed to the strlen pattern below. */
3266 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3268 /* If SRC is not a pointer type, don't do this operation inline. */
3272 /* Bail out if we can't compute strlen in the right mode. */
/* Search strlen_optab starting at TARGET_MODE, widening the mode until
   a supported insn is found (or modes run out). */
3273 while (insn_mode != VOIDmode)
3275 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3276 if (icode != CODE_FOR_nothing)
3279 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3281 if (insn_mode == VOIDmode)
3284 /* Make a place to write the result of the instruction. */
3288 && GET_MODE (result) == insn_mode
3289 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3290 result = gen_reg_rtx (insn_mode);
3292 /* Make a place to hold the source address. We will not expand
3293 the actual source until we are sure that the expansion will
3294 not fail -- there are trees that cannot be expanded twice. */
3295 src_reg = gen_reg_rtx (Pmode);
3297 /* Mark the beginning of the strlen sequence so we can emit the
3298 source operand later. */
3299 before_strlen = get_last_insn ();
3301 char_rtx = const0_rtx;
3302 char_mode = insn_data[(int) icode].operand[2].mode;
3303 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3305 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3307 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3308 char_rtx, GEN_INT (align));
3313 /* Now that we are assured of success, expand the source. */
3315 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3317 emit_move_insn (src_reg, pat);
/* Place the source-address setup ahead of the strlen sequence recorded
   at BEFORE_STRLEN (or at the start of the function if there was no
   prior insn). */
3322 emit_insn_after (pat, before_strlen);
3324 emit_insn_before (pat, get_insns ());
3326 /* Return the value in the proper mode for this function. */
3327 if (GET_MODE (result) == target_mode)
3329 else if (target != 0)
3330 convert_move (target, result, 0);
3332 target = convert_to_mode (target_mode, result, 0);
3338 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3339 bytes from constant string DATA + OFFSET and return it as target
   rtx.  The assertion below guarantees the read stays within the
   string, including its terminating NUL.  */
3343 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3344 enum machine_mode mode)
3346 const char *str = (const char *) data;
3348 gcc_assert (offset >= 0
3349 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3350 <= strlen (str) + 1));
3352 return c_readstr (str + offset, mode);
3355 /* Expand a call EXP to the memcpy builtin.
3356 Return NULL_RTX if we failed, the caller should emit a normal call,
3357 otherwise try to get the result in TARGET, if convenient (and in
3358 mode MODE if that's convenient). */
3361 expand_builtin_memcpy (tree exp, rtx target)
3363 if (!validate_arglist (exp,
3364 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3368 tree dest = CALL_EXPR_ARG (exp, 0);
3369 tree src = CALL_EXPR_ARG (exp, 1);
3370 tree len = CALL_EXPR_ARG (exp, 2);
3371 const char *src_str;
3372 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3373 unsigned int dest_align
3374 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3375 rtx dest_mem, src_mem, dest_addr, len_rtx;
3376 HOST_WIDE_INT expected_size = -1;
3377 unsigned int expected_align = 0;
3379 /* If DEST is not a pointer type, call the normal function. */
3380 if (dest_align == 0)
3383 /* If either SRC is not a pointer type, don't do this
3384 operation in-line. */
/* Pull expected alignment/size hints from value profiling of the
   currently expanded statement, if any. */
3388 if (currently_expanding_gimple_stmt)
3389 stringop_block_profile (currently_expanding_gimple_stmt,
3390 &expected_align, &expected_size);
3392 if (expected_align < dest_align)
3393 expected_align = dest_align;
3394 dest_mem = get_memory_rtx (dest, len);
3395 set_mem_align (dest_mem, dest_align);
3396 len_rtx = expand_normal (len);
3397 src_str = c_getstr (src);
3399 /* If SRC is a string constant and block move would be done
3400 by pieces, we can avoid loading the string from memory
3401 and only stored the computed constants. */
3403 && CONST_INT_P (len_rtx)
3404 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3405 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3406 CONST_CAST (char *, src_str),
3409 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3410 builtin_memcpy_read_str,
3411 CONST_CAST (char *, src_str),
3412 dest_align, false, 0);
3413 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3414 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3418 src_mem = get_memory_rtx (src, len);
3419 set_mem_align (src_mem, src_align);
3421 /* Copy word part most expediently. */
3422 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3423 CALL_EXPR_TAILCALL (exp)
3424 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3425 expected_align, expected_size);
3429 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3430 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3436 /* Expand a call EXP to the mempcpy builtin.
3437 Return NULL_RTX if we failed; the caller should emit a normal call,
3438 otherwise try to get the result in TARGET, if convenient (and in
3439 mode MODE if that's convenient). If ENDP is 0 return the
3440 destination pointer, if ENDP is 1 return the end pointer ala
3441 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3445 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3447 if (!validate_arglist (exp,
3448 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3452 tree dest = CALL_EXPR_ARG (exp, 0);
3453 tree src = CALL_EXPR_ARG (exp, 1);
3454 tree len = CALL_EXPR_ARG (exp, 2);
/* Thin wrapper: all the work happens in expand_builtin_mempcpy_args;
   ENDP == 1 selects the mempcpy return convention (end pointer). */
3455 return expand_builtin_mempcpy_args (dest, src, len,
3456 target, mode, /*endp=*/ 1);
3460 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3461 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3462 so that this can also be called without constructing an actual CALL_EXPR.
3463 The other arguments and return value are the same as for
3464 expand_builtin_mempcpy. */
3467 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3468 rtx target, enum machine_mode mode, int endp)
3470 /* If return value is ignored, transform mempcpy into memcpy. */
3471 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3473 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3474 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3476 return expand_expr (result, target, mode, EXPAND_NORMAL);
3480 const char *src_str;
3481 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3482 unsigned int dest_align
3483 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3484 rtx dest_mem, src_mem, len_rtx;
3486 /* If either SRC or DEST is not a pointer type, don't do this
3487 operation in-line. */
3488 if (dest_align == 0 || src_align == 0)
3491 /* If LEN is not constant, call the normal function. */
3492 if (! host_integerp (len, 1))
3495 len_rtx = expand_normal (len);
3496 src_str = c_getstr (src);
3498 /* If SRC is a string constant and block move would be done
3499 by pieces, we can avoid loading the string from memory
3500 and only stored the computed constants. */
3502 && CONST_INT_P (len_rtx)
3503 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3504 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3505 CONST_CAST (char *, src_str),
3508 dest_mem = get_memory_rtx (dest, len);
3509 set_mem_align (dest_mem, dest_align);
3510 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3511 builtin_memcpy_read_str,
3512 CONST_CAST (char *, src_str),
3513 dest_align, false, endp);
3514 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3515 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Otherwise, for a constant LEN small enough for the alignments
   involved, do a piecewise move. */
3519 if (CONST_INT_P (len_rtx)
3520 && can_move_by_pieces (INTVAL (len_rtx),
3521 MIN (dest_align, src_align)))
3523 dest_mem = get_memory_rtx (dest, len);
3524 set_mem_align (dest_mem, dest_align);
3525 src_mem = get_memory_rtx (src, len);
3526 set_mem_align (src_mem, src_align);
3527 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3528 MIN (dest_align, src_align), endp);
3529 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3530 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Fallback definitions when the target has no movstr pattern.
   NOTE(review): the enclosing #ifndef HAVE_movstr guard is elided from
   this excerpt -- confirm against the full source. */
3539 # define HAVE_movstr 0
3540 # define CODE_FOR_movstr CODE_FOR_nothing
3543 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3544 we failed, the caller should emit a normal call, otherwise try to
3545 get the result in TARGET, if convenient. If ENDP is 0 return the
3546 destination pointer, if ENDP is 1 return the end pointer ala
3547 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3551 expand_movstr (tree dest, tree src, rtx target, int endp)
3557 const struct insn_data_d * data;
3562 dest_mem = get_memory_rtx (dest, NULL);
3563 src_mem = get_memory_rtx (src, NULL);
3564 data = insn_data + CODE_FOR_movstr;
3567 target = force_reg (Pmode, XEXP (dest_mem, 0));
3568 dest_mem = replace_equiv_address (dest_mem, target);
3569 end = gen_reg_rtx (Pmode);
3574 || target == const0_rtx
3575 || ! (*data->operand[0].predicate) (target, Pmode))
3577 end = gen_reg_rtx (Pmode);
3578 if (target != const0_rtx)
/* Narrow END to the mode the movstr pattern's first operand wants. */
3585 if (data->operand[0].mode != VOIDmode)
3586 end = gen_lowpart (data->operand[0].mode, end);
3588 insn = data->genfun (end, dest_mem, src_mem);
3594 /* movstr is supposed to set end to the address of the NUL
3595 terminator. If the caller requested a mempcpy-like return value,
3597 if (endp == 1 && target != const0_rtx)
3599 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3600 emit_move_insn (target, force_operand (tem, NULL_RTX));
3606 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3607 NULL_RTX if we failed the caller should emit a normal call, otherwise
3608 try to get the result in TARGET, if convenient (and in mode MODE if that's
3612 expand_builtin_strcpy (tree exp, rtx target)
3614 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3616 tree dest = CALL_EXPR_ARG (exp, 0);
3617 tree src = CALL_EXPR_ARG (exp, 1);
/* Argument list is valid; delegate to the broken-out helper. */
3618 return expand_builtin_strcpy_args (dest, src, target);
3623 /* Helper function to do the actual work for expand_builtin_strcpy. The
3624 arguments to the builtin_strcpy call DEST and SRC are broken out
3625 so that this can also be called without constructing an actual CALL_EXPR.
3626 The other arguments and return value are the same as for
3627 expand_builtin_strcpy. */
3630 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
/* ENDP == 0: strcpy returns the destination pointer. */
3632 return expand_movstr (dest, src, target, /*endp=*/0);
3635 /* Expand a call EXP to the stpcpy builtin.
3636 Return NULL_RTX if we failed the caller should emit a normal call,
3637 otherwise try to get the result in TARGET, if convenient (and in
3638 mode MODE if that's convenient). */
3641 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3644 location_t loc = EXPR_LOCATION (exp);
3646 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3649 dst = CALL_EXPR_ARG (exp, 0);
3650 src = CALL_EXPR_ARG (exp, 1);
3652 /* If return value is ignored, transform stpcpy into strcpy. */
3653 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3655 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3656 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3657 return expand_expr (result, target, mode, EXPAND_NORMAL);
3664 /* Ensure we get an actual string whose length can be evaluated at
3665 compile-time, not an expression containing a string. This is
3666 because the latter will potentially produce pessimized code
3667 when used to produce the return value. */
3668 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3669 return expand_movstr (dst, src, target, /*endp=*/2);
/* Known constant length: expand as mempcpy of LEN + 1 bytes, with the
   end-minus-one return convention (ENDP == 2) that stpcpy uses. */
3671 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3672 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3673 target, mode, /*endp=*/2);
/* NOTE(review): fallback path below is reached when the mempcpy
   expansion fails (the guard is elided from this excerpt): expand a
   plain strcpy and add LEN to the returned pointer. */
3678 if (TREE_CODE (len) == INTEGER_CST)
3680 rtx len_rtx = expand_normal (len);
3682 if (CONST_INT_P (len_rtx))
3684 ret = expand_builtin_strcpy_args (dst, src, target);
3690 if (mode != VOIDmode)
3691 target = gen_reg_rtx (mode);
3693 target = gen_reg_rtx (GET_MODE (ret));
3695 if (GET_MODE (target) != GET_MODE (ret))
3696 ret = gen_lowpart (GET_MODE (target), ret);
3698 ret = plus_constant (ret, INTVAL (len_rtx));
3699 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3707 return expand_movstr (dst, src, target, /*endp=*/2);
3711 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3712 bytes from constant string DATA + OFFSET and return it as target
   rtx.  Offsets past the string's NUL take the elided branch below --
   presumably returning zero, matching strncpy's NUL padding; confirm
   against the full source.  */
3716 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3717 enum machine_mode mode)
3719 const char *str = (const char *) data;
3721 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3724 return c_readstr (str + offset, mode);
3727 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3728 NULL_RTX if we failed the caller should emit a normal call. */
3731 expand_builtin_strncpy (tree exp, rtx target)
3733 location_t loc = EXPR_LOCATION (exp);
3735 if (validate_arglist (exp,
3736 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3738 tree dest = CALL_EXPR_ARG (exp, 0);
3739 tree src = CALL_EXPR_ARG (exp, 1);
3740 tree len = CALL_EXPR_ARG (exp, 2);
3741 tree slen = c_strlen (src, 1);
3743 /* We must be passed a constant len and src parameter. */
3744 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
/* SLEN now counts the source including its terminating NUL. */
3747 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3749 /* We're required to pad with trailing zeros if the requested
3750 len is greater than strlen(s2)+1. In that case try to
3751 use store_by_pieces, if it fails, punt. */
3752 if (tree_int_cst_lt (slen, len))
3754 unsigned int dest_align
3755 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3756 const char *p = c_getstr (src);
3759 if (!p || dest_align == 0 || !host_integerp (len, 1)
3760 || !can_store_by_pieces (tree_low_cst (len, 1),
3761 builtin_strncpy_read_str,
3762 CONST_CAST (char *, p),
3766 dest_mem = get_memory_rtx (dest, len);
3767 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3768 builtin_strncpy_read_str,
3769 CONST_CAST (char *, p), dest_align, false, 0);
3770 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3771 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3778 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3779 bytes from constant string DATA + OFFSET and return it as target
   rtx.  DATA points to a single byte value; OFFSET is irrelevant
   because every position holds the same byte.  */
3783 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3784 enum machine_mode mode)
3786 const char *c = (const char *) data;
3787 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3789 memset (p, *c, GET_MODE_SIZE (mode));
3791 return c_readstr (p, mode);
3794 /* Callback routine for store_by_pieces. Return the RTL of a register
3795 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3796 char value given in the RTL register data. For example, if mode is
3797 4 bytes wide, return the RTL for 0x01010101*data. */
3800 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3801 enum machine_mode mode)
3807 size = GET_MODE_SIZE (mode);
3811 p = XALLOCAVEC (char, size);
3812 memset (p, 1, size);
/* COEFF is the 0x01...01 constant; multiplying by the byte value
   replicates it into every byte of the result. */
3813 coeff = c_readstr (p, mode);
3815 target = convert_to_mode (mode, (rtx) data, 1);
3816 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3817 return force_reg (mode, target);
3820 /* Expand expression EXP, which is a call to the memset builtin. Return
3821 NULL_RTX if we failed the caller should emit a normal call, otherwise
3822 try to get the result in TARGET, if convenient (and in mode MODE if that's
3826 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3828 if (!validate_arglist (exp,
3829 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3833 tree dest = CALL_EXPR_ARG (exp, 0);
3834 tree val = CALL_EXPR_ARG (exp, 1);
3835 tree len = CALL_EXPR_ARG (exp, 2);
/* Thin wrapper: the broken-out helper does the actual expansion. */
3836 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3840 /* Helper function to do the actual work for expand_builtin_memset. The
3841 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3842 so that this can also be called without constructing an actual CALL_EXPR.
3843 The other arguments and return value are the same as for
3844 expand_builtin_memset. */
3847 expand_builtin_memset_args (tree dest, tree val, tree len,
3848 rtx target, enum machine_mode mode, tree orig_exp)
3851 enum built_in_function fcode;
3853 unsigned int dest_align;
3854 rtx dest_mem, dest_addr, len_rtx;
3855 HOST_WIDE_INT expected_size = -1;
3856 unsigned int expected_align = 0;
3858 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3860 /* If DEST is not a pointer type, don't do this operation in-line. */
3861 if (dest_align == 0)
/* Pull expected alignment/size hints from value profiling, if any. */
3864 if (currently_expanding_gimple_stmt)
3865 stringop_block_profile (currently_expanding_gimple_stmt,
3866 &expected_align, &expected_size);
3868 if (expected_align < dest_align)
3869 expected_align = dest_align;
3871 /* If the LEN parameter is zero, return DEST. */
3872 if (integer_zerop (len))
3874 /* Evaluate and ignore VAL in case it has side-effects. */
3875 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3876 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3879 /* Stabilize the arguments in case we fail. */
3880 dest = builtin_save_expr (dest);
3881 val = builtin_save_expr (val);
3882 len = builtin_save_expr (len);
3884 len_rtx = expand_normal (len);
3885 dest_mem = get_memory_rtx (dest, len);
/* Non-constant fill value: broadcast it at run time via
   builtin_memset_gen_str / a setmem pattern. */
3887 if (TREE_CODE (val) != INTEGER_CST)
3891 val_rtx = expand_normal (val);
3892 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3895 /* Assume that we can memset by pieces if we can store
3896 * the coefficients by pieces (in the required modes).
3897 * We can't pass builtin_memset_gen_str as that emits RTL. */
3899 if (host_integerp (len, 1)
3900 && can_store_by_pieces (tree_low_cst (len, 1),
3901 builtin_memset_read_str, &c, dest_align,
3904 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3906 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3907 builtin_memset_gen_str, val_rtx, dest_align,
3910 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3911 dest_align, expected_align,
3915 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3916 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Constant fill value: reduce it to a host char C first. */
3920 if (target_char_cast (val, &c))
3925 if (host_integerp (len, 1)
3926 && can_store_by_pieces (tree_low_cst (len, 1),
3927 builtin_memset_read_str, &c, dest_align,
3929 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3930 builtin_memset_read_str, &c, dest_align, true, 0);
3931 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3932 dest_align, expected_align,
3936 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3937 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3941 set_mem_align (dest_mem, dest_align);
3942 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3943 CALL_EXPR_TAILCALL (orig_exp)
3944 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3945 expected_align, expected_size);
3949 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3950 dest_addr = convert_memory_address (ptr_mode, dest_addr);
/* NOTE(review): fallback reached via an elided label when inline
   expansion fails -- rebuild the original memset/bzero CALL_EXPR from
   the stabilized arguments and expand it as a normal call. */
3956 fndecl = get_callee_fndecl (orig_exp);
3957 fcode = DECL_FUNCTION_CODE (fndecl);
3958 if (fcode == BUILT_IN_MEMSET)
3959 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3961 else if (fcode == BUILT_IN_BZERO)
3962 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3966 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3967 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3968 return expand_call (fn, target, target == const0_rtx);
3971 /* Expand expression EXP, which is a call to the bzero builtin. Return
3972 NULL_RTX if we failed the caller should emit a normal call. */
3975 expand_builtin_bzero (tree exp)
3978 location_t loc = EXPR_LOCATION (exp);
3980 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3983 dest = CALL_EXPR_ARG (exp, 0);
3984 size = CALL_EXPR_ARG (exp, 1);
3986 /* New argument list transforming bzero(ptr x, int y) to
3987 memset(ptr x, int 0, size_t y). This is done this way
3988 so that if it isn't expanded inline, we fallback to
3989 calling bzero instead of memset. */
/* TARGET is const0_rtx because bzero's return value is void/ignored. */
3991 return expand_builtin_memset_args (dest, integer_zero_node,
3992 fold_convert_loc (loc, sizetype, size),
3993 const0_rtx, VOIDmode, exp);
3996 /* Expand expression EXP, which is a call to the memcmp built-in function.
3997 Return NULL_RTX if we failed and the
3998 caller should emit a normal call, otherwise try to get the result in
3999 TARGET, if convenient (and in mode MODE, if that's convenient). */
4002 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4003 ATTRIBUTE_UNUSED enum machine_mode mode)
4005 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4007 if (!validate_arglist (exp,
4008 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4011 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4013 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4016 tree arg1 = CALL_EXPR_ARG (exp, 0);
4017 tree arg2 = CALL_EXPR_ARG (exp, 1);
4018 tree len = CALL_EXPR_ARG (exp, 2);
4021 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4023 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4024 enum machine_mode insn_mode;
/* Prefer the cmpmemsi pattern; fall back to cmpstrnsi if only that
   one is available on this target. */
4026 #ifdef HAVE_cmpmemsi
4028 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4031 #ifdef HAVE_cmpstrnsi
4033 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4038 /* If we don't have POINTER_TYPE, call the function. */
4039 if (arg1_align == 0 || arg2_align == 0)
4042 /* Make a place to write the result of the instruction. */
4045 && REG_P (result) && GET_MODE (result) == insn_mode
4046 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4047 result = gen_reg_rtx (insn_mode);
4049 arg1_rtx = get_memory_rtx (arg1, len);
4050 arg2_rtx = get_memory_rtx (arg2, len);
4051 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4053 /* Set MEM_SIZE as appropriate. */
4054 if (CONST_INT_P (arg3_rtx))
4056 set_mem_size (arg1_rtx, arg3_rtx);
4057 set_mem_size (arg2_rtx, arg3_rtx);
4060 #ifdef HAVE_cmpmemsi
4062 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4063 GEN_INT (MIN (arg1_align, arg2_align)));
4066 #ifdef HAVE_cmpstrnsi
4068 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4069 GEN_INT (MIN (arg1_align, arg2_align)));
/* No usable pattern: emit a library call to memcmp (LCT_PURE -- reads
   memory but has no other side effects). */
4077 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4078 TYPE_MODE (integer_type_node), 3,
4079 XEXP (arg1_rtx, 0), Pmode,
4080 XEXP (arg2_rtx, 0), Pmode,
4081 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4082 TYPE_UNSIGNED (sizetype)),
4083 TYPE_MODE (sizetype));
4085 /* Return the value in the proper mode for this function. */
4086 mode = TYPE_MODE (TREE_TYPE (exp));
4087 if (GET_MODE (result) == mode)
4089 else if (target != 0)
4091 convert_move (target, result, 0);
4095 return convert_to_mode (mode, result, 0);
4102 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4103 if we failed the caller should emit a normal call, otherwise try to get
4104 the result in TARGET, if convenient. */
4107 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4109 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4112 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4113 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4114 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4116 rtx arg1_rtx, arg2_rtx;
4117 rtx result, insn = NULL_RTX;
4119 tree arg1 = CALL_EXPR_ARG (exp, 0);
4120 tree arg2 = CALL_EXPR_ARG (exp, 1);
4123 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4125 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4127 /* If we don't have POINTER_TYPE, call the function. */
4128 if (arg1_align == 0 || arg2_align == 0)
4131 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4132 arg1 = builtin_save_expr (arg1);
4133 arg2 = builtin_save_expr (arg2);
4135 arg1_rtx = get_memory_rtx (arg1, NULL);
4136 arg2_rtx = get_memory_rtx (arg2, NULL);
4138 #ifdef HAVE_cmpstrsi
4139 /* Try to call cmpstrsi. */
4142 enum machine_mode insn_mode
4143 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4145 /* Make a place to write the result of the instruction. */
4148 && REG_P (result) && GET_MODE (result) == insn_mode
4149 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4150 result = gen_reg_rtx (insn_mode);
4152 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4153 GEN_INT (MIN (arg1_align, arg2_align)));
4156 #ifdef HAVE_cmpstrnsi
4157 /* Try to determine at least one length and call cmpstrnsi. */
4158 if (!insn && HAVE_cmpstrnsi)
4163 enum machine_mode insn_mode
4164 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
/* c_strlen with only_value==1 may look through side-effect-free
   subexpressions when computing the length. */
4165 tree len1 = c_strlen (arg1, 1);
4166 tree len2 = c_strlen (arg2, 1);
4169 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4171 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4173 /* If we don't have a constant length for the first, use the length
4174 of the second, if we know it. We don't require a constant for
4175 this case; some cost analysis could be done if both are available
4176 but neither is constant. For now, assume they're equally cheap,
4177 unless one has side effects. If both strings have constant lengths,
4184 else if (TREE_SIDE_EFFECTS (len1))
4186 else if (TREE_SIDE_EFFECTS (len2))
4188 else if (TREE_CODE (len1) != INTEGER_CST)
4190 else if (TREE_CODE (len2) != INTEGER_CST)
4192 else if (tree_int_cst_lt (len1, len2))
4197 /* If both arguments have side effects, we cannot optimize. */
4198 if (!len || TREE_SIDE_EFFECTS (len))
4201 arg3_rtx = expand_normal (len);
4203 /* Make a place to write the result of the instruction. */
4206 && REG_P (result) && GET_MODE (result) == insn_mode
4207 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4208 result = gen_reg_rtx (insn_mode);
4210 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4211 GEN_INT (MIN (arg1_align, arg2_align)));
4217 enum machine_mode mode;
4220 /* Return the value in the proper mode for this function. */
4221 mode = TYPE_MODE (TREE_TYPE (exp));
4222 if (GET_MODE (result) == mode)
4225 return convert_to_mode (mode, result, 0);
4226 convert_move (target, result, 0);
4230 /* Expand the library call ourselves using a stabilized argument
4231 list to avoid re-evaluating the function's arguments twice. */
4232 #ifdef HAVE_cmpstrnsi
4235 fndecl = get_callee_fndecl (exp);
4236 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4237 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4238 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4239 return expand_call (fn, target, target == const0_rtx);
4245 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4246 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4247 the result in TARGET, if convenient. */
4250 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4251 ATTRIBUTE_UNUSED enum machine_mode mode)
4253 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4255 if (!validate_arglist (exp,
4256 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4259 /* If c_strlen can determine an expression for one of the string
4260 lengths, and it doesn't have side effects, then emit cmpstrnsi
4261 using length MIN(strlen(string)+1, arg3). */
4262 #ifdef HAVE_cmpstrnsi
4265 tree len, len1, len2;
4266 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4269 tree arg1 = CALL_EXPR_ARG (exp, 0);
4270 tree arg2 = CALL_EXPR_ARG (exp, 1);
4271 tree arg3 = CALL_EXPR_ARG (exp, 2);
4274 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4276 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4277 enum machine_mode insn_mode
4278 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4280 len1 = c_strlen (arg1, 1);
4281 len2 = c_strlen (arg2, 1);
/* LEN1/LEN2 become strlen + 1 so the terminating NUL participates
   in the comparison, mirroring strncmp semantics. */
4284 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4286 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4288 /* If we don't have a constant length for the first, use the length
4289 of the second, if we know it. We don't require a constant for
4290 this case; some cost analysis could be done if both are available
4291 but neither is constant. For now, assume they're equally cheap,
4292 unless one has side effects. If both strings have constant lengths,
4299 else if (TREE_SIDE_EFFECTS (len1))
4301 else if (TREE_SIDE_EFFECTS (len2))
4303 else if (TREE_CODE (len1) != INTEGER_CST)
4305 else if (TREE_CODE (len2) != INTEGER_CST)
4307 else if (tree_int_cst_lt (len1, len2))
4312 /* If both arguments have side effects, we cannot optimize. */
4313 if (!len || TREE_SIDE_EFFECTS (len))
4316 /* The actual new length parameter is MIN(len,arg3). */
4317 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4318 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4320 /* If we don't have POINTER_TYPE, call the function. */
4321 if (arg1_align == 0 || arg2_align == 0)
4324 /* Make a place to write the result of the instruction. */
4327 && REG_P (result) && GET_MODE (result) == insn_mode
4328 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4329 result = gen_reg_rtx (insn_mode);
4331 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4332 arg1 = builtin_save_expr (arg1);
4333 arg2 = builtin_save_expr (arg2);
4334 len = builtin_save_expr (len);
4336 arg1_rtx = get_memory_rtx (arg1, len);
4337 arg2_rtx = get_memory_rtx (arg2, len);
4338 arg3_rtx = expand_normal (len);
4339 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4340 GEN_INT (MIN (arg1_align, arg2_align)));
4345 /* Return the value in the proper mode for this function. */
4346 mode = TYPE_MODE (TREE_TYPE (exp));
4347 if (GET_MODE (result) == mode)
4350 return convert_to_mode (mode, result, 0);
4351 convert_move (target, result, 0);
4355 /* Expand the library call ourselves using a stabilized argument
4356 list to avoid re-evaluating the function's arguments twice. */
4357 fndecl = get_callee_fndecl (exp);
4358 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4360 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4361 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4362 return expand_call (fn, target, target == const0_rtx);
4368 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4369 if that's convenient. */
4372 expand_builtin_saveregs (void)
4376 /* Don't do __builtin_saveregs more than once in a function.
4377 Save the result of the first call and reuse it. */
4378 if (saveregs_value != 0)
4379 return saveregs_value;
4381 /* When this function is called, it means that registers must be
4382 saved on entry to this function. So we migrate the call to the
4383 first insn of this function. */
4387 /* Do whatever the machine needs done in this case. */
4388 val = targetm.calls.expand_builtin_saveregs ();
4393 saveregs_value = val;
4395 /* Put the insns after the NOTE that starts the function. If this
4396 is inside a start_sequence, make the outer-level insn chain current, so
4397 the code is placed at the start of the function. */
4398 push_topmost_sequence ();
4399 emit_insn_after (seq, entry_of_function ());
4400 pop_topmost_sequence ();
4405 /* __builtin_args_info (N) returns word N of the arg space info
4406 for the current function. The number and meanings of words
4407 is controlled by the definition of CUMULATIVE_ARGS. */
4410 expand_builtin_args_info (tree exp)
4412 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
/* View the cumulative-args record as a flat array of ints so word N can
   be fetched by index.  */
4413 int *word_ptr = (int *) &crtl->args.info;
/* The flat-int view above is only valid if CUMULATIVE_ARGS is an exact
   multiple of int-sized words.  */
4415 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4417 if (call_expr_nargs (exp) != 0)
/* The word index must be a compile-time integer constant.  */
4419 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4420 error ("argument of %<__builtin_args_info%> must be constant");
4423 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4425 if (wordnum < 0 || wordnum >= nwords)
4426 error ("argument of %<__builtin_args_info%> out of range");
4428 return GEN_INT (word_ptr[wordnum]);
4432 error ("missing argument in %<__builtin_args_info%>");
4437 /* Expand a call to __builtin_next_arg.
   Returns the address one past the last named argument: the internal
   arg pointer plus the target-recorded argument offset.  */
4440 expand_builtin_next_arg (void)
4442 /* Checking arguments is already done in fold_builtin_next_arg
4443 that must be called before this function. */
4444 return expand_binop (ptr_mode, add_optab,
4445 crtl->args.internal_arg_pointer,
4446 crtl->args.arg_offset_rtx,
4447 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4450 /* Make it easier for the backends by protecting the valist argument
4451 from multiple evaluations.
   NEEDS_LVALUE is nonzero when the caller will write through the result
   (e.g. va_start / the dst of va_copy) -- TODO confirm against callers.
   NOTE(review): this listing elides lines (closing braces, the final
   return, and part of the needs_lvalue handling are not visible).  */
4454 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4456 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4458 /* The current way of determining the type of valist is completely
4459 bogus. We should have the information on the va builtin instead. */
4461 vatype = targetm.fn_abi_va_list (cfun->decl);
4463 if (TREE_CODE (vatype) == ARRAY_TYPE)
/* Array-type va_list: wrap side-effecting expressions so they are
   evaluated only once.  */
4465 if (TREE_SIDE_EFFECTS (valist))
4466 valist = save_expr (valist);
4468 /* For this case, the backends will be expecting a pointer to
4469 vatype, but it's possible we've actually been given an array
4470 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4472 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4474 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4475 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
/* Non-array va_list: take the address, stabilize it, and dereference
   through a MEM_REF so each later use re-reads through one pointer.  */
4480 tree pt = build_pointer_type (vatype);
4484 if (! TREE_SIDE_EFFECTS (valist))
4487 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4488 TREE_SIDE_EFFECTS (valist) = 1;
4491 if (TREE_SIDE_EFFECTS (valist))
4492 valist = save_expr (valist);
4493 valist = fold_build2_loc (loc, MEM_REF,
4494 vatype, valist, build_int_cst (pt, 0));
4500 /* The "standard" definition of va_list is void*.
   Default for TARGET_BUILD_BUILTIN_VA_LIST.  */
4503 std_build_builtin_va_list (void)
4505 return ptr_type_node;
4508 /* The "standard" abi va_list is va_list_type_node.
   Default for TARGET_FN_ABI_VA_LIST; FNDECL is unused here.  */
4511 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4513 return va_list_type_node;
4516 /* The "standard" type of va_list is va_list_type_node.
   Returns va_list_type_node when TYPE is (a reference/pointer to) the
   canonical va_list type; the not-a-va_list return path is elided from
   this listing -- presumably NULL_TREE, confirm in full source.  */
4519 std_canonical_va_list_type (tree type)
/* Strip one level of indirection: either an INDIRECT_REF node, or a
   pointer whose pointee is itself a pointer type.  */
4523 if (INDIRECT_REF_P (type))
4524 type = TREE_TYPE (type);
4525 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4526 type = TREE_TYPE (type);
4527 wtype = va_list_type_node;
4529 /* Treat structure va_list types. */
4530 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4531 htype = TREE_TYPE (htype);
4532 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4534 /* If va_list is an array type, the argument may have decayed
4535 to a pointer type, e.g. by being passed to another function.
4536 In that case, unwrap both types so that we can compare the
4537 underlying records. */
4538 if (TREE_CODE (htype) == ARRAY_TYPE
4539 || POINTER_TYPE_P (htype))
4541 wtype = TREE_TYPE (wtype);
4542 htype = TREE_TYPE (htype);
/* Compare main variants so qualifiers/typedefs don't matter.  */
4545 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4546 return va_list_type_node;
4551 /* The "standard" implementation of va_start: just assign `nextarg' to
   the va_list object VALIST.  Default for TARGET_EXPAND_BUILTIN_VA_START.  */
4555 std_expand_builtin_va_start (tree valist, rtx nextarg)
/* Expand VALIST as a write destination, then store NEXTARG into it.  */
4557 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4558 convert_move (va_r, nextarg, 0);
4561 /* Expand EXP, a call to __builtin_va_start.
   Validates the argument count, stabilizes the va_list operand, and
   dispatches to the target hook when the target provides one.  */
4564 expand_builtin_va_start (tree exp)
4568 location_t loc = EXPR_LOCATION (exp);
4570 if (call_expr_nargs (exp) < 2)
4572 error_at (loc, "too few arguments to function %<va_start%>");
/* fold_builtin_next_arg diagnoses a bad last-named-parameter argument;
   bail out if it reported an error.  */
4576 if (fold_builtin_next_arg (exp, true))
4579 nextarg = expand_builtin_next_arg ();
/* Stabilize with needs_lvalue=1: va_start writes through valist.  */
4580 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4582 if (targetm.expand_builtin_va_start)
4583 targetm.expand_builtin_va_start (valist, nextarg);
4585 std_expand_builtin_va_start (valist, nextarg);
4590 /* The "standard" implementation of va_arg: read the value from the
4591 current (padded) address and increment by the (padded) size.
   Default for TARGET_GIMPLIFY_VA_ARG_EXPR; args-grow-up machines only.
   NOTE(review): this listing elides lines (braces, the gcc_unreachable
   under ARGS_GROW_DOWNWARD, and the addr = valist_tmp assignment are
   not visible) -- confirm against the full source before editing.  */
4594 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4597 tree addr, t, type_size, rounded_size, valist_tmp;
4598 unsigned HOST_WIDE_INT align, boundary;
4601 #ifdef ARGS_GROW_DOWNWARD
4602 /* All of the alignment and movement below is for args-grow-up machines.
4603 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4604 implement their own specialized gimplify_va_arg_expr routines. */
/* Pass-by-reference arguments are fetched as a pointer and dereferenced
   at the end (see build_va_arg_indirect_ref below).  */
4608 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4610 type = build_pointer_type (type);
4612 align = PARM_BOUNDARY / BITS_PER_UNIT;
4613 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4615 /* When we align parameter on stack for caller, if the parameter
4616 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4617 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4618 here with caller. */
4619 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4620 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4622 boundary /= BITS_PER_UNIT;
4624 /* Hoist the valist value into a temporary for the moment. */
4625 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4627 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4628 requires greater alignment, we must perform dynamic alignment. */
4629 if (boundary > align
4630 && !integer_zerop (TYPE_SIZE (type)))
/* valist_tmp = valist_tmp + (boundary - 1) ... */
4632 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4633 fold_build2 (POINTER_PLUS_EXPR,
4635 valist_tmp, size_int (boundary - 1)));
4636 gimplify_and_add (t, pre_p);
/* ... then valist_tmp &= -boundary, i.e. round up to BOUNDARY.  */
4638 t = fold_convert (sizetype, valist_tmp);
4639 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4640 fold_convert (TREE_TYPE (valist),
4641 fold_build2 (BIT_AND_EXPR, sizetype, t,
4642 size_int (-boundary))));
4643 gimplify_and_add (t, pre_p);
4648 /* If the actual alignment is less than the alignment of the type,
4649 adjust the type accordingly so that we don't assume strict alignment
4650 when dereferencing the pointer. */
4651 boundary *= BITS_PER_UNIT;
4652 if (boundary < TYPE_ALIGN (type))
4654 type = build_variant_type_copy (type);
4655 TYPE_ALIGN (type) = boundary;
4658 /* Compute the rounded size of the type. */
4659 type_size = size_in_bytes (type);
4660 rounded_size = round_up (type_size, align);
4662 /* Reduce rounded_size so it's sharable with the postqueue. */
4663 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4667 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4669 /* Small args are padded downward. */
/* addr += (rounded_size > align) ? 0 : rounded_size - type_size.  */
4670 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4671 rounded_size, size_int (align));
4672 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4673 size_binop (MINUS_EXPR, rounded_size, type_size));
4674 addr = fold_build2 (POINTER_PLUS_EXPR,
4675 TREE_TYPE (addr), addr, t);
4678 /* Compute new value for AP. */
4679 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4680 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4681 gimplify_and_add (t, pre_p);
4683 addr = fold_convert (build_pointer_type (type), addr);
/* For pass-by-reference, the slot held a pointer; add an extra deref.  */
4686 addr = build_va_arg_indirect_ref (addr);
4688 return build_va_arg_indirect_ref (addr);
4691 /* Build an indirect-ref expression over the given TREE, which represents a
4692 piece of a va_arg() expansion. */
4694 build_va_arg_indirect_ref (tree addr)
4696 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
/* Mudflap must not instrument this dereference; the marking call itself
   is elided from this listing.  */
4698 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4704 /* Return a dummy expression of type TYPE in order to keep going after an
   error: an INDIRECT_REF of a (TYPE *)0 constant, so the result has the
   right mode even though it must never be evaluated.  */
4708 dummy_object (tree type)
4710 tree t = build_int_cst (build_pointer_type (type), 0);
4711 return build1 (INDIRECT_REF, type, t);
4714 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4715 builtin function, but a very special sort of operator.
   NOTE(review): this listing elides lines (error returns, braces, and
   the final return of a gimplify status) -- confirm in full source.  */
4717 enum gimplify_status
4718 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4720 tree promoted_type, have_va_type;
4721 tree valist = TREE_OPERAND (*expr_p, 0);
4722 tree type = TREE_TYPE (*expr_p);
4724 location_t loc = EXPR_LOCATION (*expr_p);
4726 /* Verify that valist is of the proper type. */
4727 have_va_type = TREE_TYPE (valist);
4728 if (have_va_type == error_mark_node)
4730 have_va_type = targetm.canonical_va_list_type (have_va_type);
4732 if (have_va_type == NULL_TREE)
4734 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4738 /* Generate a diagnostic for requesting data of a type that cannot
4739 be passed through `...' due to type promotion at the call site. */
4740 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
/* gave_help is static so the "pass T not U" hint is printed only once
   per compilation.  */
4743 static bool gave_help;
4746 /* Unfortunately, this is merely undefined, rather than a constraint
4747 violation, so we cannot make this an error. If this call is never
4748 executed, the program is still strictly conforming. */
4749 warned = warning_at (loc, 0,
4750 "%qT is promoted to %qT when passed through %<...%>",
4751 type, promoted_type);
4752 if (!gave_help && warned)
4755 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4756 promoted_type, type);
4759 /* We can, however, treat "undefined" any way we please.
4760 Call abort to encourage the user to fix the program. */
4762 inform (loc, "if this code is reached, the program will abort");
4763 /* Before the abort, allow the evaluation of the va_list
4764 expression to exit or longjmp. */
4765 gimplify_and_add (valist, pre_p);
4766 t = build_call_expr_loc (loc,
4767 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4768 gimplify_and_add (t, pre_p);
4770 /* This is dead code, but go ahead and finish so that the
4771 mode of the result comes out right. */
4772 *expr_p = dummy_object (type);
4777 /* Make it easier for the backends by protecting the valist argument
4778 from multiple evaluations. */
4779 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4781 /* For this case, the backends will be expecting a pointer to
4782 TREE_TYPE (abi), but it's possible we've
4783 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4785 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4787 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4788 valist = fold_convert_loc (loc, p1,
4789 build_fold_addr_expr_loc (loc, valist));
/* Array case gimplifies to an rvalue pointer; otherwise keep an lvalue
   so the target hook can update the va_list in place.  */
4792 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4795 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4797 if (!targetm.gimplify_va_arg_expr)
4798 /* FIXME: Once most targets are converted we should merely
4799 assert this is non-null. */
4802 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4807 /* Expand EXP, a call to __builtin_va_end.
   va_end is a no-op on every target here; only side effects of the
   va_list expression itself are evaluated.  */
4810 expand_builtin_va_end (tree exp)
4812 tree valist = CALL_EXPR_ARG (exp, 0);
4814 /* Evaluate for side effects, if needed. I hate macros that don't
4816 if (TREE_SIDE_EFFECTS (valist))
4817 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4822 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4823 builtin rather than just as an assignment in stdarg.h because of the
4824 nastiness of array-type va_list types. */
4827 expand_builtin_va_copy (tree exp)
4830 location_t loc = EXPR_LOCATION (exp);
4832 dst = CALL_EXPR_ARG (exp, 0);
4833 src = CALL_EXPR_ARG (exp, 1);
/* dst is written (needs_lvalue=1); src is only read.  */
4835 dst = stabilize_va_list_loc (loc, dst, 1);
4836 src = stabilize_va_list_loc (loc, src, 0);
4838 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4840 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
/* Scalar/record va_list: a plain assignment suffices.  */
4842 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4843 TREE_SIDE_EFFECTS (t) = 1;
4844 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Array-type va_list: copy the whole object with a block move.  */
4848 rtx dstb, srcb, size;
4850 /* Evaluate to pointers. */
4851 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4852 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4853 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4854 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4856 dstb = convert_memory_address (Pmode, dstb);
4857 srcb = convert_memory_address (Pmode, srcb);
4859 /* "Dereference" to BLKmode memories. */
4860 dstb = gen_rtx_MEM (BLKmode, dstb);
4861 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4862 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4863 srcb = gen_rtx_MEM (BLKmode, srcb);
4864 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4865 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4868 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4874 /* Expand a call to one of the builtin functions __builtin_frame_address or
4875 __builtin_return_address.
   NOTE(review): the error-path returns and the tem declaration are elided
   from this listing -- confirm against the full source.  */
4878 expand_builtin_frame_address (tree fndecl, tree exp)
4880 /* The argument must be a nonnegative integer constant.
4881 It counts the number of frames to scan up the stack.
4882 The value is the return address saved in that frame. */
4883 if (call_expr_nargs (exp) == 0)
4884 /* Warning about missing arg was already issued. */
4886 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4888 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4889 error ("invalid argument to %<__builtin_frame_address%>");
4891 error ("invalid argument to %<__builtin_return_address%>");
/* Both builtins share expand_builtin_return_addr; the function code
   selects which address is produced.  */
4897 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4898 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4900 /* Some ports cannot access arbitrary stack frames. */
4903 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4904 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4906 warning (0, "unsupported argument to %<__builtin_return_address%>");
4910 /* For __builtin_frame_address, return what we've got. */
4911 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
/* Non-constant, non-register results are copied into a fresh Pmode reg
   so the caller gets a usable operand.  */
4915 && ! CONSTANT_P (tem))
4916 tem = copy_to_mode_reg (Pmode, tem);
4921 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4922 we failed and the caller should emit a normal call, otherwise try to get
4923 the result in TARGET, if convenient. */
4926 expand_builtin_alloca (tree exp, rtx target)
4931 /* Emit normal call if marked not-inlineable. */
4932 if (CALL_CANNOT_INLINE_P (exp))
4935 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4938 /* Compute the argument. */
4939 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4941 /* Allocate the desired space. */
4942 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
/* The stack pointer is Pmode; the builtin's result type is ptr_mode,
   which may differ, so convert before returning.  */
4943 result = convert_memory_address (ptr_mode, result);
4948 /* Expand a call to a bswap builtin with argument ARG0. MODE
4949 is the mode to expand with.
   Returns the byte-swapped value in MODE; expand_unop may legitimately
   place the result somewhere other than TARGET.  */
4952 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4954 enum machine_mode mode;
4958 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4961 arg = CALL_EXPR_ARG (exp, 0);
4962 mode = TYPE_MODE (TREE_TYPE (arg));
4963 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4965 target = expand_unop (mode, bswap_optab, op0, target, 1);
4967 gcc_assert (target);
4969 return convert_to_mode (mode, target, 0);
4972 /* Expand a call to a unary builtin in EXP.
4973 Return NULL_RTX if a normal call should be emitted rather than expanding the
4974 function in-line. If convenient, the result should be placed in TARGET.
4975 SUBTARGET may be used as the target for computing one of EXP's operands. */
4978 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4979 rtx subtarget, optab op_optab)
4983 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4986 /* Compute the argument. */
4987 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4988 VOIDmode, EXPAND_NORMAL);
4989 /* Compute op, into TARGET if possible.
4990 Set TARGET to wherever the result comes back. */
4991 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4992 op_optab, op0, target, 1);
4993 gcc_assert (target);
/* The operation is done in the argument's mode; widen/narrow to the
   builtin's declared result mode.  */
4995 return convert_to_mode (target_mode, target, 0);
4998 /* Expand a call to __builtin_expect. We just return our argument
4999 as the builtin_expect semantic should've been already executed by
5000 tree branch prediction pass. */
5003 expand_builtin_expect (tree exp, rtx target)
5007 if (call_expr_nargs (exp) < 2)
5009 arg = CALL_EXPR_ARG (exp, 0);
5011 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5012 /* When guessing was done, the hints should be already stripped away. */
/* If we still see __builtin_expect here with branch prediction enabled
   and errors clean, the stripping pass failed -- hence the assert.  */
5013 gcc_assert (!flag_guess_branch_prob
5014 || optimize == 0 || seen_error ());
/* Emit a trap: use the target's trap insn when available, otherwise
   fall back to calling abort via the library.  */
5019 expand_builtin_trap (void)
5023 emit_insn (gen_trap ());
5026 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5030 /* Expand a call to __builtin_unreachable. We do nothing except emit
5031 a barrier saying that control flow will not pass here.
5033 It is the responsibility of the program being compiled to ensure
5034 that control flow does never reach __builtin_unreachable. */
5036 expand_builtin_unreachable (void)
5041 /* Expand EXP, a call to fabs, fabsf or fabsl.
5042 Return NULL_RTX if a normal call should be emitted rather than expanding
5043 the function inline. If convenient, the result should be placed
5044 in TARGET. SUBTARGET may be used as the target for computing
5048 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5050 enum machine_mode mode;
5054 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5057 arg = CALL_EXPR_ARG (exp, 0);
/* Save the argument back into the CALL_EXPR so a later fallback call
   does not re-evaluate it.  */
5058 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5059 mode = TYPE_MODE (TREE_TYPE (arg));
5060 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5061 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5064 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5065 Return NULL is a normal call should be emitted rather than expanding the
5066 function inline. If convenient, the result should be placed in TARGET.
5067 SUBTARGET may be used as the target for computing the operand. */
5070 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5075 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
/* op0 supplies the magnitude, op1 the sign.  */
5078 arg = CALL_EXPR_ARG (exp, 0);
5079 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5081 arg = CALL_EXPR_ARG (exp, 1);
5082 op1 = expand_normal (arg);
5084 return expand_copysign (op0, op1, target);
5087 /* Create a new constant string literal and return a char* pointer to it.
5088 The STRING_CST value is the LEN characters at STR. */
5090 build_string_literal (int len, const char *str)
5092 tree t, elem, index, type;
5094 t = build_string (len, str);
/* Element type is const char; the array type is const char[len].  */
5095 elem = build_type_variant (char_type_node, 1, 0);
5096 index = build_index_type (size_int (len - 1));
5097 type = build_array_type (elem, index);
5098 TREE_TYPE (t) = type;
5099 TREE_CONSTANT (t) = 1;
5100 TREE_READONLY (t) = 1;
5101 TREE_STATIC (t) = 1;
/* Return &str[0] as a const char * via an ARRAY_REF under ADDR_EXPR.  */
5103 type = build_pointer_type (elem);
5104 t = build1 (ADDR_EXPR, type,
5105 build4 (ARRAY_REF, elem,
5106 t, integer_zero_node, NULL_TREE, NULL_TREE));
5110 /* Expand a call to either the entry or exit function profiler.
   EXITP selects __cyg_profile-style exit vs entry library function.  */
5113 expand_builtin_profile_func (bool exitp)
5115 rtx this_rtx, which;
/* The address of the current function: DECL_RTL is a MEM whose operand
   is the symbol ref.  */
5117 this_rtx = DECL_RTL (current_function_decl);
5118 gcc_assert (MEM_P (this_rtx));
5119 this_rtx = XEXP (this_rtx, 0);
5122 which = profile_function_exit_libfunc;
5124 which = profile_function_entry_libfunc;
/* Pass (this_fn, call_site) to the profiling hook.  */
5126 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5127 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5134 /* Expand a call to __builtin___clear_cache.
   Three configurations: no insn but a libgcc implementation (expand as a
   call), neither insn nor implementation (no-op), or a clear_cache insn
   (expand inline).  */
5137 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5139 #ifndef HAVE_clear_cache
5140 #ifdef CLEAR_INSN_CACHE
5141 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5142 does something. Just do the default expansion to a call to
5146 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5147 does nothing. There is no need to call it. Do nothing. */
5149 #endif /* CLEAR_INSN_CACHE */
5151 /* We have a "clear_cache" insn, and it will handle everything. */
5153 rtx begin_rtx, end_rtx;
5154 enum insn_code icode;
5156 /* We must not expand to a library call. If we did, any
5157 fallback library function in libgcc that might contain a call to
5158 __builtin___clear_cache() would recurse infinitely. */
5159 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5161 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5165 if (HAVE_clear_cache)
5167 icode = CODE_FOR_clear_cache;
/* Force each operand into a form the insn's predicate accepts.  */
5169 begin = CALL_EXPR_ARG (exp, 0);
5170 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5171 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5172 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5173 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5175 end = CALL_EXPR_ARG (exp, 1);
5176 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5177 end_rtx = convert_memory_address (Pmode, end_rtx);
5178 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5179 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5181 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5184 #endif /* HAVE_clear_cache */
5187 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5190 round_trampoline_addr (rtx tramp)
5192 rtx temp, addend, mask;
5194 /* If we don't need too much alignment, we'll have been guaranteed
5195 proper alignment by get_trampoline_type. */
5196 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5199 /* Round address up to desired boundary. */
/* tramp = (tramp + align-1) & -align, computed with expand_simple_binop
   so it works for non-constant addresses.  */
5200 temp = gen_reg_rtx (Pmode);
5201 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5202 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5204 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5205 temp, 0, OPTAB_LIB_WIDEN);
5206 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5207 temp, 0, OPTAB_LIB_WIDEN);
/* Expand a call to __builtin_init_trampoline (tramp, func, chain):
   set up the trampoline memory and hand off to the target hook.  */
5213 expand_builtin_init_trampoline (tree exp)
5215 tree t_tramp, t_func, t_chain;
5216 rtx m_tramp, r_tramp, r_chain, tmp;
5218 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5219 POINTER_TYPE, VOID_TYPE))
5222 t_tramp = CALL_EXPR_ARG (exp, 0);
5223 t_func = CALL_EXPR_ARG (exp, 1);
5224 t_chain = CALL_EXPR_ARG (exp, 2);
5226 r_tramp = expand_normal (t_tramp);
5227 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5228 MEM_NOTRAP_P (m_tramp) = 1;
5230 /* The TRAMP argument should be the address of a field within the
5231 local function's FRAME decl. Let's see if we can fill in the
5232 to fill in the MEM_ATTRs for this memory. */
5233 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5234 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
/* If rounding changed the address, rebuild the MEM with the aligned
   address and record the alignment/size we now know.  */
5237 tmp = round_trampoline_addr (r_tramp);
5240 m_tramp = change_address (m_tramp, BLKmode, tmp);
5241 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5242 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5245 /* The FUNC argument should be the address of the nested function.
5246 Extract the actual function decl to pass to the hook. */
5247 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5248 t_func = TREE_OPERAND (t_func, 0);
5249 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5251 r_chain = expand_normal (t_chain);
5253 /* Generate insns to initialize the trampoline. */
5254 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
/* Record that this translation unit created a trampoline (used e.g.
   for executable-stack handling).  */
5256 trampolines_created = 1;
/* Expand __builtin_adjust_trampoline (tramp): align the address and let
   the target adjust it (e.g. for function-descriptor ABIs) if it has a
   hook for that.  */
5261 expand_builtin_adjust_trampoline (tree exp)
5265 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5268 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5269 tramp = round_trampoline_addr (tramp);
5270 if (targetm.calls.trampoline_adjust_address)
5271 tramp = targetm.calls.trampoline_adjust_address (tramp);
5276 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5277 function. The function first checks whether the back end provides
5278 an insn to implement signbit for the respective mode. If not, it
5279 checks whether the floating point format of the value is such that
5280 the sign bit can be extracted. If that is not the case, the
5281 function returns NULL_RTX to indicate that a normal call should be
5282 emitted rather than expanding the function in-line. EXP is the
5283 expression that is a call to the builtin function; if convenient,
5284 the result should be placed in TARGET.
   NOTE(review): this listing elides lines (returns on success/failure
   paths and several braces) -- confirm against the full source.  */
5286 expand_builtin_signbit (tree exp, rtx target)
5288 const struct real_format *fmt;
5289 enum machine_mode fmode, imode, rmode;
5292 enum insn_code icode;
5294 location_t loc = EXPR_LOCATION (exp);
5296 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5299 arg = CALL_EXPR_ARG (exp, 0);
5300 fmode = TYPE_MODE (TREE_TYPE (arg));
5301 rmode = TYPE_MODE (TREE_TYPE (exp));
5302 fmt = REAL_MODE_FORMAT (fmode);
5304 arg = builtin_save_expr (arg);
5306 /* Expand the argument yielding a RTX expression. */
5307 temp = expand_normal (arg);
5309 /* Check if the back end provides an insn that handles signbit for the
5311 icode = signbit_optab->handlers [(int) fmode].insn_code;
5312 if (icode != CODE_FOR_nothing)
/* Try the target insn first; on failure roll back any emitted insns
   and fall through to the bit-extraction path.  */
5314 rtx last = get_last_insn ();
5315 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5316 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5318 delete_insns_since (last);
5321 /* For floating point formats without a sign bit, implement signbit
5323 bitpos = fmt->signbit_ro;
5326 /* But we can't do this if the format supports signed zero. */
5327 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
/* No sign bit in the format: signbit(x) is simply x < 0.  */
5330 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5331 build_real (TREE_TYPE (arg), dconst0));
5332 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5335 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
/* Whole value fits in one word: reinterpret it as an integer mode.  */
5337 imode = int_mode_for_mode (fmode);
5338 if (imode == BLKmode)
5340 temp = gen_lowpart (imode, temp);
5345 /* Handle targets with different FP word orders. */
/* Multi-word value: pick the word containing the sign bit and adjust
   bitpos to be relative to that word.  */
5346 if (FLOAT_WORDS_BIG_ENDIAN)
5347 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5349 word = bitpos / BITS_PER_WORD;
5350 temp = operand_subword_force (temp, word, fmode);
5351 bitpos = bitpos % BITS_PER_WORD;
5354 /* Force the intermediate word_mode (or narrower) result into a
5355 register. This avoids attempting to create paradoxical SUBREGs
5356 of floating point modes below. */
5357 temp = force_reg (imode, temp);
5359 /* If the bitpos is within the "result mode" lowpart, the operation
5360 can be implement with a single bitwise AND. Otherwise, we need
5361 a right shift and an AND. */
5363 if (bitpos < GET_MODE_BITSIZE (rmode))
5365 double_int mask = double_int_setbit (double_int_zero, bitpos);
5367 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5368 temp = gen_lowpart (rmode, temp);
5369 temp = expand_binop (rmode, and_optab, temp,
5370 immed_double_int_const (mask, rmode),
5371 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5375 /* Perform a logical right shift to place the signbit in the least
5376 significant bit, then truncate the result to the desired mode
5377 and mask just this bit. */
5378 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5379 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5380 temp = gen_lowpart (rmode, temp);
5381 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5382 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5388 /* Expand fork or exec calls. TARGET is the desired target of the
5389 call. EXP is the call. FN is the
5390 identificator of the actual function. IGNORE is nonzero if the
5391 value is to be ignored. */
5394 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5399 /* If we are not profiling, just call the function. */
5400 if (!profile_arc_flag)
5403 /* Otherwise call the wrapper. This should be equivalent for the rest of
5404 compiler, so the code does not diverge, and the wrapper may run the
5405 code necessary for keeping the profiling sane. */
/* Map each fork/exec builtin to its libgcov wrapper, which flushes
   profile data before replacing/duplicating the process image.  */
5407 switch (DECL_FUNCTION_CODE (fn))
5410 id = get_identifier ("__gcov_fork");
5413 case BUILT_IN_EXECL:
5414 id = get_identifier ("__gcov_execl");
5417 case BUILT_IN_EXECV:
5418 id = get_identifier ("__gcov_execv");
5421 case BUILT_IN_EXECLP:
5422 id = get_identifier ("__gcov_execlp");
5425 case BUILT_IN_EXECLE:
5426 id = get_identifier ("__gcov_execle");
5429 case BUILT_IN_EXECVP:
5430 id = get_identifier ("__gcov_execvp");
5433 case BUILT_IN_EXECVE:
5434 id = get_identifier ("__gcov_execve");
/* Declare the wrapper as an external function with the same type as
   the original builtin, then rewrite the call to use it.  */
5441 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5442 FUNCTION_DECL, id, TREE_TYPE (fn));
5443 DECL_EXTERNAL (decl) = 1;
5444 TREE_PUBLIC (decl) = 1;
5445 DECL_ARTIFICIAL (decl) = 1;
5446 TREE_NOTHROW (decl) = 1;
5447 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5448 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5449 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5450 return expand_call (call, target, ignore);
5455 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5456 the pointer in these functions is void*, the tree optimizers may remove
5457 casts. The mode computed in expand_builtin isn't reliable either, due
5458 to __sync_bool_compare_and_swap.
5460 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5461 group of builtins. This gives us log2 of the mode size. */
5463 static inline enum machine_mode
5464 get_builtin_sync_mode (int fcode_diff)
5466 /* The size is not negotiable, so ask not to get BLKmode in return
5467 if the target indicates that a smaller size would be better. */
5468 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5471 /* Expand the memory expression LOC and return the appropriate memory operand
5472 for the builtin_sync operations. */
5475 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5479 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5480 addr = convert_memory_address (Pmode, addr);
5482 /* Note that we explicitly do not want any alias information for this
5483 memory, so that we kill all other live memories. Otherwise we don't
5484 satisfy the full barrier semantics of the intrinsic. */
5485 mem = validize_mem (gen_rtx_MEM (mode, addr));
/* Record the best alignment we can prove from the pointer expression,
   use the barrier alias set, and mark the MEM volatile so it is never
   removed or reordered.  */
5487 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5488 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5489 MEM_VOLATILE_P (mem) = 1;
5494 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5495 EXP is the CALL_EXPR. CODE is the rtx code
5496 that corresponds to the arithmetic or logical operation from the name;
5497 an exception here is that NOT actually means NAND. TARGET is an optional
5498 place for us to store the results; AFTER is true if this is the
5499 fetch_and_xxx form. IGNORE is true if we don't actually care about
5500 the result of the operation at all. */
5503 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5504 enum rtx_code code, bool after,
5505 rtx target, bool ignore)
5508 enum machine_mode old_mode;
5509 location_t loc = EXPR_LOCATION (exp);
5511 if (code == NOT && warn_sync_nand)
5513 tree fndecl = get_callee_fndecl (exp);
5514 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5516 static bool warned_f_a_n, warned_n_a_f;
5520 case BUILT_IN_FETCH_AND_NAND_1:
5521 case BUILT_IN_FETCH_AND_NAND_2:
5522 case BUILT_IN_FETCH_AND_NAND_4:
5523 case BUILT_IN_FETCH_AND_NAND_8:
5524 case BUILT_IN_FETCH_AND_NAND_16:
5529 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5530 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5531 warned_f_a_n = true;
5534 case BUILT_IN_NAND_AND_FETCH_1:
5535 case BUILT_IN_NAND_AND_FETCH_2:
5536 case BUILT_IN_NAND_AND_FETCH_4:
5537 case BUILT_IN_NAND_AND_FETCH_8:
5538 case BUILT_IN_NAND_AND_FETCH_16:
5543 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5544 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5545 warned_n_a_f = true;
5553 /* Expand the operands. */
5554 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5556 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5557 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5558 of CONST_INTs, where we know the old_mode only from the call argument. */
5559 old_mode = GET_MODE (val);
5560 if (old_mode == VOIDmode)
5561 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5562 val = convert_modes (mode, old_mode, val, 1);
5565 return expand_sync_operation (mem, val, code);
5567 return expand_sync_fetch_operation (mem, val, code, after, target);
5570 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5571 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5572 true if this is the boolean form. TARGET is a place for us to store the
5573 results; this is NOT optional if IS_BOOL is true. */
5576 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5577 bool is_bool, rtx target)
5579 rtx old_val, new_val, mem;
5580 enum machine_mode old_mode;
5582 /* Expand the operands. */
5583 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5586 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5587 mode, EXPAND_NORMAL);
5588 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5589 of CONST_INTs, where we know the old_mode only from the call argument. */
5590 old_mode = GET_MODE (old_val);
5591 if (old_mode == VOIDmode)
5592 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5593 old_val = convert_modes (mode, old_mode, old_val, 1);
5595 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5596 mode, EXPAND_NORMAL);
5597 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5598 of CONST_INTs, where we know the old_mode only from the call argument. */
5599 old_mode = GET_MODE (new_val);
5600 if (old_mode == VOIDmode)
5601 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5602 new_val = convert_modes (mode, old_mode, new_val, 1);
5605 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5607 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5610 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5611 general form is actually an atomic exchange, and some targets only
5612 support a reduced form with the second argument being a constant 1.
5613 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5617 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5621 enum machine_mode old_mode;
5623 /* Expand the operands. */
5624 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5625 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5626 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5627 of CONST_INTs, where we know the old_mode only from the call argument. */
5628 old_mode = GET_MODE (val);
5629 if (old_mode == VOIDmode)
5630 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5631 val = convert_modes (mode, old_mode, val, 1);
5633 return expand_sync_lock_test_and_set (mem, val, target);
5636 /* Expand the __sync_synchronize intrinsic. */
5639 expand_builtin_synchronize (void)
5642 VEC (tree, gc) *v_clobbers;
5644 #ifdef HAVE_memory_barrier
5645 if (HAVE_memory_barrier)
5647 emit_insn (gen_memory_barrier ());
5652 if (synchronize_libfunc != NULL_RTX)
5654 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5658 /* If no explicit memory barrier instruction is available, create an
5659 empty asm stmt with a memory clobber. */
5660 v_clobbers = VEC_alloc (tree, gc, 1);
5661 VEC_quick_push (tree, v_clobbers,
5662 tree_cons (NULL, build_string (6, "memory"), NULL));
5663 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5664 gimple_asm_set_volatile (x, true);
5665 expand_asm_stmt (x);
5668 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5671 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5673 enum insn_code icode;
5675 rtx val = const0_rtx;
5677 /* Expand the operands. */
5678 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5680 /* If there is an explicit operation in the md file, use it. */
5681 icode = sync_lock_release[mode];
5682 if (icode != CODE_FOR_nothing)
5684 if (!insn_data[icode].operand[1].predicate (val, mode))
5685 val = force_reg (mode, val);
5687 insn = GEN_FCN (icode) (mem, val);
5695 /* Otherwise we can implement this operation by emitting a barrier
5696 followed by a store of zero. */
5697 expand_builtin_synchronize ();
5698 emit_move_insn (mem, val);
5701 /* Expand an expression EXP that calls a built-in function,
5702 with result going to TARGET if that's convenient
5703 (and in mode MODE if that's convenient).
5704 SUBTARGET may be used as the target for computing one of EXP's operands.
5705 IGNORE is nonzero if the value is to be ignored. */
/* NOTE(review): this numbered listing has dropped many structural lines
   (the function header, braces, the "switch (fcode)" line, "break"s and
   several case labels); the annotations below describe only the visible
   code and must be re-checked against a complete copy of the file.  */
5708 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5711 tree fndecl = get_callee_fndecl (exp);
5712 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5713 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
/* Machine-specific (BUILT_IN_MD) builtins are expanded entirely by the
   target hook.  */
5715 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5716 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5718 /* When not optimizing, generate calls to library functions for a certain
5721 && !called_as_built_in (fndecl)
5722 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5723 && fcode != BUILT_IN_ALLOCA
5724 && fcode != BUILT_IN_FREE)
5725 return expand_call (exp, target, ignore);
5727 /* The built-in function expanders test for target == const0_rtx
5728 to determine whether the function's result will be ignored. */
5730 target = const0_rtx;
5732 /* If the result of a pure or const built-in function is ignored, and
5733 none of its arguments are volatile, we can avoid expanding the
5734 built-in call and just evaluate the arguments for side-effects. */
5735 if (target == const0_rtx
5736 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5738 bool volatilep = false;
5740 call_expr_arg_iterator iter;
5742 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5743 if (TREE_THIS_VOLATILE (arg))
5751 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5752 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Dispatch on FCODE: each case expands one family of builtins; an
   expander that yields no rtx falls through to the expand_call at the
   bottom of the function so the library routine is called instead.  */
5759 CASE_FLT_FN (BUILT_IN_FABS):
5760 target = expand_builtin_fabs (exp, target, subtarget);
5765 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5766 target = expand_builtin_copysign (exp, target, subtarget);
5771 /* Just do a normal library call if we were unable to fold
5773 CASE_FLT_FN (BUILT_IN_CABS):
5776 CASE_FLT_FN (BUILT_IN_EXP):
5777 CASE_FLT_FN (BUILT_IN_EXP10):
5778 CASE_FLT_FN (BUILT_IN_POW10):
5779 CASE_FLT_FN (BUILT_IN_EXP2):
5780 CASE_FLT_FN (BUILT_IN_EXPM1):
5781 CASE_FLT_FN (BUILT_IN_LOGB):
5782 CASE_FLT_FN (BUILT_IN_LOG):
5783 CASE_FLT_FN (BUILT_IN_LOG10):
5784 CASE_FLT_FN (BUILT_IN_LOG2):
5785 CASE_FLT_FN (BUILT_IN_LOG1P):
5786 CASE_FLT_FN (BUILT_IN_TAN):
5787 CASE_FLT_FN (BUILT_IN_ASIN):
5788 CASE_FLT_FN (BUILT_IN_ACOS):
5789 CASE_FLT_FN (BUILT_IN_ATAN):
5790 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5791 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5792 because of possible accuracy problems. */
5793 if (! flag_unsafe_math_optimizations)
5795 CASE_FLT_FN (BUILT_IN_SQRT):
5796 CASE_FLT_FN (BUILT_IN_FLOOR):
5797 CASE_FLT_FN (BUILT_IN_CEIL):
5798 CASE_FLT_FN (BUILT_IN_TRUNC):
5799 CASE_FLT_FN (BUILT_IN_ROUND):
5800 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5801 CASE_FLT_FN (BUILT_IN_RINT):
5802 target = expand_builtin_mathfn (exp, target, subtarget);
5807 CASE_FLT_FN (BUILT_IN_ILOGB):
5808 if (! flag_unsafe_math_optimizations)
5810 CASE_FLT_FN (BUILT_IN_ISINF):
5811 CASE_FLT_FN (BUILT_IN_FINITE):
5812 case BUILT_IN_ISFINITE:
5813 case BUILT_IN_ISNORMAL:
5814 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5819 CASE_FLT_FN (BUILT_IN_LCEIL):
5820 CASE_FLT_FN (BUILT_IN_LLCEIL):
5821 CASE_FLT_FN (BUILT_IN_LFLOOR):
5822 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5823 target = expand_builtin_int_roundingfn (exp, target);
5828 CASE_FLT_FN (BUILT_IN_LRINT):
5829 CASE_FLT_FN (BUILT_IN_LLRINT):
5830 CASE_FLT_FN (BUILT_IN_LROUND):
5831 CASE_FLT_FN (BUILT_IN_LLROUND):
5832 target = expand_builtin_int_roundingfn_2 (exp, target);
5837 CASE_FLT_FN (BUILT_IN_POW):
5838 target = expand_builtin_pow (exp, target, subtarget);
5843 CASE_FLT_FN (BUILT_IN_POWI):
5844 target = expand_builtin_powi (exp, target, subtarget);
5849 CASE_FLT_FN (BUILT_IN_ATAN2):
5850 CASE_FLT_FN (BUILT_IN_LDEXP):
5851 CASE_FLT_FN (BUILT_IN_SCALB):
5852 CASE_FLT_FN (BUILT_IN_SCALBN):
5853 CASE_FLT_FN (BUILT_IN_SCALBLN):
5854 if (! flag_unsafe_math_optimizations)
5857 CASE_FLT_FN (BUILT_IN_FMOD):
5858 CASE_FLT_FN (BUILT_IN_REMAINDER):
5859 CASE_FLT_FN (BUILT_IN_DREM):
5860 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5865 CASE_FLT_FN (BUILT_IN_CEXPI):
5866 target = expand_builtin_cexpi (exp, target, subtarget);
5867 gcc_assert (target);
5870 CASE_FLT_FN (BUILT_IN_SIN):
5871 CASE_FLT_FN (BUILT_IN_COS):
5872 if (! flag_unsafe_math_optimizations)
5874 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5879 CASE_FLT_FN (BUILT_IN_SINCOS):
5880 if (! flag_unsafe_math_optimizations)
5882 target = expand_builtin_sincos (exp);
5887 case BUILT_IN_APPLY_ARGS:
5888 return expand_builtin_apply_args ();
5890 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5891 FUNCTION with a copy of the parameters described by
5892 ARGUMENTS, and ARGSIZE. It returns a block of memory
5893 allocated on the stack into which is stored all the registers
5894 that might possibly be used for returning the result of a
5895 function. ARGUMENTS is the value returned by
5896 __builtin_apply_args. ARGSIZE is the number of bytes of
5897 arguments that must be copied. ??? How should this value be
5898 computed? We'll also need a safe worst case value for varargs
5900 case BUILT_IN_APPLY:
5901 if (!validate_arglist (exp, POINTER_TYPE,
5902 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5903 && !validate_arglist (exp, REFERENCE_TYPE,
5904 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5910 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5911 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5912 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5914 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5917 /* __builtin_return (RESULT) causes the function to return the
5918 value described by RESULT. RESULT is address of the block of
5919 memory returned by __builtin_apply. */
5920 case BUILT_IN_RETURN:
5921 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5922 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5925 case BUILT_IN_SAVEREGS:
5926 return expand_builtin_saveregs ();
5928 case BUILT_IN_ARGS_INFO:
5929 return expand_builtin_args_info (exp);
5931 case BUILT_IN_VA_ARG_PACK:
5932 /* All valid uses of __builtin_va_arg_pack () are removed during
5934 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5937 case BUILT_IN_VA_ARG_PACK_LEN:
5938 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5940 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5943 /* Return the address of the first anonymous stack arg. */
5944 case BUILT_IN_NEXT_ARG:
5945 if (fold_builtin_next_arg (exp, false))
5947 return expand_builtin_next_arg ();
5949 case BUILT_IN_CLEAR_CACHE:
5950 target = expand_builtin___clear_cache (exp);
5955 case BUILT_IN_CLASSIFY_TYPE:
5956 return expand_builtin_classify_type (exp);
5958 case BUILT_IN_CONSTANT_P:
5961 case BUILT_IN_FRAME_ADDRESS:
5962 case BUILT_IN_RETURN_ADDRESS:
5963 return expand_builtin_frame_address (fndecl, exp);
5965 /* Returns the address of the area where the structure is returned.
5967 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5968 if (call_expr_nargs (exp) != 0
5969 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5970 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5973 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5975 case BUILT_IN_ALLOCA:
5976 target = expand_builtin_alloca (exp, target);
5981 case BUILT_IN_STACK_SAVE:
5982 return expand_stack_save ();
5984 case BUILT_IN_STACK_RESTORE:
5985 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5988 case BUILT_IN_BSWAP32:
5989 case BUILT_IN_BSWAP64:
5990 target = expand_builtin_bswap (exp, target, subtarget);
5996 CASE_INT_FN (BUILT_IN_FFS):
5997 case BUILT_IN_FFSIMAX:
5998 target = expand_builtin_unop (target_mode, exp, target,
5999 subtarget, ffs_optab);
6004 CASE_INT_FN (BUILT_IN_CLZ):
6005 case BUILT_IN_CLZIMAX:
6006 target = expand_builtin_unop (target_mode, exp, target,
6007 subtarget, clz_optab);
6012 CASE_INT_FN (BUILT_IN_CTZ):
6013 case BUILT_IN_CTZIMAX:
6014 target = expand_builtin_unop (target_mode, exp, target,
6015 subtarget, ctz_optab);
6020 CASE_INT_FN (BUILT_IN_POPCOUNT):
6021 case BUILT_IN_POPCOUNTIMAX:
6022 target = expand_builtin_unop (target_mode, exp, target,
6023 subtarget, popcount_optab);
6028 CASE_INT_FN (BUILT_IN_PARITY):
6029 case BUILT_IN_PARITYIMAX:
6030 target = expand_builtin_unop (target_mode, exp, target,
6031 subtarget, parity_optab);
6036 case BUILT_IN_STRLEN:
6037 target = expand_builtin_strlen (exp, target, target_mode);
6042 case BUILT_IN_STRCPY:
6043 target = expand_builtin_strcpy (exp, target);
6048 case BUILT_IN_STRNCPY:
6049 target = expand_builtin_strncpy (exp, target);
6054 case BUILT_IN_STPCPY:
6055 target = expand_builtin_stpcpy (exp, target, mode);
6060 case BUILT_IN_MEMCPY:
6061 target = expand_builtin_memcpy (exp, target);
6066 case BUILT_IN_MEMPCPY:
6067 target = expand_builtin_mempcpy (exp, target, mode);
6072 case BUILT_IN_MEMSET:
6073 target = expand_builtin_memset (exp, target, mode);
6078 case BUILT_IN_BZERO:
6079 target = expand_builtin_bzero (exp);
6084 case BUILT_IN_STRCMP:
6085 target = expand_builtin_strcmp (exp, target);
6090 case BUILT_IN_STRNCMP:
6091 target = expand_builtin_strncmp (exp, target, mode);
6097 case BUILT_IN_MEMCMP:
6098 target = expand_builtin_memcmp (exp, target, mode);
6103 case BUILT_IN_SETJMP:
6104 /* This should have been lowered to the builtins below. */
6107 case BUILT_IN_SETJMP_SETUP:
6108 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6109 and the receiver label. */
6110 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6112 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6113 VOIDmode, EXPAND_NORMAL);
6114 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6115 rtx label_r = label_rtx (label);
6117 /* This is copied from the handling of non-local gotos. */
6118 expand_builtin_setjmp_setup (buf_addr, label_r);
6119 nonlocal_goto_handler_labels
6120 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6121 nonlocal_goto_handler_labels);
6122 /* ??? Do not let expand_label treat us as such since we would
6123 not want to be both on the list of non-local labels and on
6124 the list of forced labels. */
6125 FORCED_LABEL (label) = 0;
6130 case BUILT_IN_SETJMP_DISPATCHER:
6131 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6132 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6134 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6135 rtx label_r = label_rtx (label);
6137 /* Remove the dispatcher label from the list of non-local labels
6138 since the receiver labels have been added to it above. */
6139 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6144 case BUILT_IN_SETJMP_RECEIVER:
6145 /* __builtin_setjmp_receiver is passed the receiver label. */
6146 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6148 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6149 rtx label_r = label_rtx (label);
6151 expand_builtin_setjmp_receiver (label_r);
6156 /* __builtin_longjmp is passed a pointer to an array of five words.
6157 It's similar to the C library longjmp function but works with
6158 __builtin_setjmp above. */
6159 case BUILT_IN_LONGJMP:
6160 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6162 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6163 VOIDmode, EXPAND_NORMAL);
6164 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6166 if (value != const1_rtx)
6168 error ("%<__builtin_longjmp%> second argument must be 1");
6172 expand_builtin_longjmp (buf_addr, value);
6177 case BUILT_IN_NONLOCAL_GOTO:
6178 target = expand_builtin_nonlocal_goto (exp);
6183 /* This updates the setjmp buffer that is its argument with the value
6184 of the current stack pointer. */
6185 case BUILT_IN_UPDATE_SETJMP_BUF:
6186 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6189 = expand_normal (CALL_EXPR_ARG (exp, 0));
6191 expand_builtin_update_setjmp_buf (buf_addr);
6197 expand_builtin_trap ();
6200 case BUILT_IN_UNREACHABLE:
6201 expand_builtin_unreachable ();
6204 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6205 case BUILT_IN_SIGNBITD32:
6206 case BUILT_IN_SIGNBITD64:
6207 case BUILT_IN_SIGNBITD128:
6208 target = expand_builtin_signbit (exp, target);
6213 /* Various hooks for the DWARF 2 __throw routine. */
6214 case BUILT_IN_UNWIND_INIT:
6215 expand_builtin_unwind_init ();
6217 case BUILT_IN_DWARF_CFA:
6218 return virtual_cfa_rtx;
6219 #ifdef DWARF2_UNWIND_INFO
6220 case BUILT_IN_DWARF_SP_COLUMN:
6221 return expand_builtin_dwarf_sp_column ();
6222 case BUILT_IN_INIT_DWARF_REG_SIZES:
6223 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6226 case BUILT_IN_FROB_RETURN_ADDR:
6227 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6228 case BUILT_IN_EXTRACT_RETURN_ADDR:
6229 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6230 case BUILT_IN_EH_RETURN:
6231 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6232 CALL_EXPR_ARG (exp, 1));
6234 #ifdef EH_RETURN_DATA_REGNO
6235 case BUILT_IN_EH_RETURN_DATA_REGNO:
6236 return expand_builtin_eh_return_data_regno (exp);
6238 case BUILT_IN_EXTEND_POINTER:
6239 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6240 case BUILT_IN_EH_POINTER:
6241 return expand_builtin_eh_pointer (exp);
6242 case BUILT_IN_EH_FILTER:
6243 return expand_builtin_eh_filter (exp);
6244 case BUILT_IN_EH_COPY_VALUES:
6245 return expand_builtin_eh_copy_values (exp);
6247 case BUILT_IN_VA_START:
6248 return expand_builtin_va_start (exp);
6249 case BUILT_IN_VA_END:
6250 return expand_builtin_va_end (exp);
6251 case BUILT_IN_VA_COPY:
6252 return expand_builtin_va_copy (exp);
6253 case BUILT_IN_EXPECT:
6254 return expand_builtin_expect (exp, target);
6255 case BUILT_IN_PREFETCH:
6256 expand_builtin_prefetch (exp);
6259 case BUILT_IN_PROFILE_FUNC_ENTER:
6260 return expand_builtin_profile_func (false);
6261 case BUILT_IN_PROFILE_FUNC_EXIT:
6262 return expand_builtin_profile_func (true);
6264 case BUILT_IN_INIT_TRAMPOLINE:
6265 return expand_builtin_init_trampoline (exp);
6266 case BUILT_IN_ADJUST_TRAMPOLINE:
6267 return expand_builtin_adjust_trampoline (exp);
6270 case BUILT_IN_EXECL:
6271 case BUILT_IN_EXECV:
6272 case BUILT_IN_EXECLP:
6273 case BUILT_IN_EXECLE:
6274 case BUILT_IN_EXECVP:
6275 case BUILT_IN_EXECVE:
6276 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
/* __sync_* builtins: each size-suffixed code is mapped to its machine
   mode via get_builtin_sync_mode and expanded by the sync helpers
   defined earlier in this file.  */
6281 case BUILT_IN_FETCH_AND_ADD_1:
6282 case BUILT_IN_FETCH_AND_ADD_2:
6283 case BUILT_IN_FETCH_AND_ADD_4:
6284 case BUILT_IN_FETCH_AND_ADD_8:
6285 case BUILT_IN_FETCH_AND_ADD_16:
6286 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6287 target = expand_builtin_sync_operation (mode, exp, PLUS,
6288 false, target, ignore);
6293 case BUILT_IN_FETCH_AND_SUB_1:
6294 case BUILT_IN_FETCH_AND_SUB_2:
6295 case BUILT_IN_FETCH_AND_SUB_4:
6296 case BUILT_IN_FETCH_AND_SUB_8:
6297 case BUILT_IN_FETCH_AND_SUB_16:
6298 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6299 target = expand_builtin_sync_operation (mode, exp, MINUS,
6300 false, target, ignore);
6305 case BUILT_IN_FETCH_AND_OR_1:
6306 case BUILT_IN_FETCH_AND_OR_2:
6307 case BUILT_IN_FETCH_AND_OR_4:
6308 case BUILT_IN_FETCH_AND_OR_8:
6309 case BUILT_IN_FETCH_AND_OR_16:
6310 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6311 target = expand_builtin_sync_operation (mode, exp, IOR,
6312 false, target, ignore);
6317 case BUILT_IN_FETCH_AND_AND_1:
6318 case BUILT_IN_FETCH_AND_AND_2:
6319 case BUILT_IN_FETCH_AND_AND_4:
6320 case BUILT_IN_FETCH_AND_AND_8:
6321 case BUILT_IN_FETCH_AND_AND_16:
6322 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6323 target = expand_builtin_sync_operation (mode, exp, AND,
6324 false, target, ignore);
6329 case BUILT_IN_FETCH_AND_XOR_1:
6330 case BUILT_IN_FETCH_AND_XOR_2:
6331 case BUILT_IN_FETCH_AND_XOR_4:
6332 case BUILT_IN_FETCH_AND_XOR_8:
6333 case BUILT_IN_FETCH_AND_XOR_16:
6334 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6335 target = expand_builtin_sync_operation (mode, exp, XOR,
6336 false, target, ignore);
6341 case BUILT_IN_FETCH_AND_NAND_1:
6342 case BUILT_IN_FETCH_AND_NAND_2:
6343 case BUILT_IN_FETCH_AND_NAND_4:
6344 case BUILT_IN_FETCH_AND_NAND_8:
6345 case BUILT_IN_FETCH_AND_NAND_16:
6346 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6347 target = expand_builtin_sync_operation (mode, exp, NOT,
6348 false, target, ignore);
6353 case BUILT_IN_ADD_AND_FETCH_1:
6354 case BUILT_IN_ADD_AND_FETCH_2:
6355 case BUILT_IN_ADD_AND_FETCH_4:
6356 case BUILT_IN_ADD_AND_FETCH_8:
6357 case BUILT_IN_ADD_AND_FETCH_16:
6358 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6359 target = expand_builtin_sync_operation (mode, exp, PLUS,
6360 true, target, ignore);
6365 case BUILT_IN_SUB_AND_FETCH_1:
6366 case BUILT_IN_SUB_AND_FETCH_2:
6367 case BUILT_IN_SUB_AND_FETCH_4:
6368 case BUILT_IN_SUB_AND_FETCH_8:
6369 case BUILT_IN_SUB_AND_FETCH_16:
6370 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6371 target = expand_builtin_sync_operation (mode, exp, MINUS,
6372 true, target, ignore);
6377 case BUILT_IN_OR_AND_FETCH_1:
6378 case BUILT_IN_OR_AND_FETCH_2:
6379 case BUILT_IN_OR_AND_FETCH_4:
6380 case BUILT_IN_OR_AND_FETCH_8:
6381 case BUILT_IN_OR_AND_FETCH_16:
6382 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6383 target = expand_builtin_sync_operation (mode, exp, IOR,
6384 true, target, ignore);
6389 case BUILT_IN_AND_AND_FETCH_1:
6390 case BUILT_IN_AND_AND_FETCH_2:
6391 case BUILT_IN_AND_AND_FETCH_4:
6392 case BUILT_IN_AND_AND_FETCH_8:
6393 case BUILT_IN_AND_AND_FETCH_16:
6394 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6395 target = expand_builtin_sync_operation (mode, exp, AND,
6396 true, target, ignore);
6401 case BUILT_IN_XOR_AND_FETCH_1:
6402 case BUILT_IN_XOR_AND_FETCH_2:
6403 case BUILT_IN_XOR_AND_FETCH_4:
6404 case BUILT_IN_XOR_AND_FETCH_8:
6405 case BUILT_IN_XOR_AND_FETCH_16:
6406 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6407 target = expand_builtin_sync_operation (mode, exp, XOR,
6408 true, target, ignore);
6413 case BUILT_IN_NAND_AND_FETCH_1:
6414 case BUILT_IN_NAND_AND_FETCH_2:
6415 case BUILT_IN_NAND_AND_FETCH_4:
6416 case BUILT_IN_NAND_AND_FETCH_8:
6417 case BUILT_IN_NAND_AND_FETCH_16:
6418 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6419 target = expand_builtin_sync_operation (mode, exp, NOT,
6420 true, target, ignore);
6425 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6426 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6427 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6428 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6429 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6430 if (mode == VOIDmode)
6431 mode = TYPE_MODE (boolean_type_node);
6432 if (!target || !register_operand (target, mode))
6433 target = gen_reg_rtx (mode);
6435 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6436 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6441 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6442 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6443 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6444 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6445 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6446 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6447 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6452 case BUILT_IN_LOCK_TEST_AND_SET_1:
6453 case BUILT_IN_LOCK_TEST_AND_SET_2:
6454 case BUILT_IN_LOCK_TEST_AND_SET_4:
6455 case BUILT_IN_LOCK_TEST_AND_SET_8:
6456 case BUILT_IN_LOCK_TEST_AND_SET_16:
6457 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6458 target = expand_builtin_lock_test_and_set (mode, exp, target);
6463 case BUILT_IN_LOCK_RELEASE_1:
6464 case BUILT_IN_LOCK_RELEASE_2:
6465 case BUILT_IN_LOCK_RELEASE_4:
6466 case BUILT_IN_LOCK_RELEASE_8:
6467 case BUILT_IN_LOCK_RELEASE_16:
6468 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6469 expand_builtin_lock_release (mode, exp);
6472 case BUILT_IN_SYNCHRONIZE:
6473 expand_builtin_synchronize ();
/* Object-size and _chk (fortified) builtins: expand the memory
   variants directly; for the rest only emit overflow warnings here.  */
6476 case BUILT_IN_OBJECT_SIZE:
6477 return expand_builtin_object_size (exp);
6479 case BUILT_IN_MEMCPY_CHK:
6480 case BUILT_IN_MEMPCPY_CHK:
6481 case BUILT_IN_MEMMOVE_CHK:
6482 case BUILT_IN_MEMSET_CHK:
6483 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6488 case BUILT_IN_STRCPY_CHK:
6489 case BUILT_IN_STPCPY_CHK:
6490 case BUILT_IN_STRNCPY_CHK:
6491 case BUILT_IN_STRCAT_CHK:
6492 case BUILT_IN_STRNCAT_CHK:
6493 case BUILT_IN_SNPRINTF_CHK:
6494 case BUILT_IN_VSNPRINTF_CHK:
6495 maybe_emit_chk_warning (exp, fcode);
6498 case BUILT_IN_SPRINTF_CHK:
6499 case BUILT_IN_VSPRINTF_CHK:
6500 maybe_emit_sprintf_chk_warning (exp, fcode);
6504 maybe_emit_free_warning (exp);
6507 default: /* just do library call, if unknown builtin */
6511 /* The switch statement above can drop through to cause the function
6512 to be called normally. */
6513 return expand_call (exp, target, ignore);
6516 /* Determine whether a tree node represents a call to a built-in
6517 function. If the tree T is a call to a built-in function with
6518 the right number of arguments of the appropriate types, return
6519 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6520 Otherwise the return value is END_BUILTINS. */
6522 enum built_in_function
6523 builtin_mathfn_code (const_tree t)
6525 const_tree fndecl, arg, parmlist;
6526 const_tree argtype, parmtype;
6527 const_call_expr_arg_iterator iter;
6529 if (TREE_CODE (t) != CALL_EXPR
6530 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6531 return END_BUILTINS;
6533 fndecl = get_callee_fndecl (t);
6534 if (fndecl == NULL_TREE
6535 || TREE_CODE (fndecl) != FUNCTION_DECL
6536 || ! DECL_BUILT_IN (fndecl)
6537 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6538 return END_BUILTINS;
6540 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6541 init_const_call_expr_arg_iterator (t, &iter);
6542 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6544 /* If a function doesn't take a variable number of arguments,
6545 the last element in the list will have type `void'. */
6546 parmtype = TREE_VALUE (parmlist);
6547 if (VOID_TYPE_P (parmtype))
6549 if (more_const_call_expr_args_p (&iter))
6550 return END_BUILTINS;
6551 return DECL_FUNCTION_CODE (fndecl);
6554 if (! more_const_call_expr_args_p (&iter))
6555 return END_BUILTINS;
6557 arg = next_const_call_expr_arg (&iter);
6558 argtype = TREE_TYPE (arg);
6560 if (SCALAR_FLOAT_TYPE_P (parmtype))
6562 if (! SCALAR_FLOAT_TYPE_P (argtype))
6563 return END_BUILTINS;
6565 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6567 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6568 return END_BUILTINS;
6570 else if (POINTER_TYPE_P (parmtype))
6572 if (! POINTER_TYPE_P (argtype))
6573 return END_BUILTINS;
6575 else if (INTEGRAL_TYPE_P (parmtype))
6577 if (! INTEGRAL_TYPE_P (argtype))
6578 return END_BUILTINS;
6581 return END_BUILTINS;
6584 /* Variable-length argument list. */
6585 return DECL_FUNCTION_CODE (fndecl);
6588 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6589 evaluate to a constant. */
6592 fold_builtin_constant_p (tree arg)
6594 /* We return 1 for a numeric type that's known to be a constant
6595 value at compile-time or for an aggregate type that's a
6596 literal constant. */
6599 /* If we know this is a constant, emit the constant of one. */
6600 if (CONSTANT_CLASS_P (arg)
6601 || (TREE_CODE (arg) == CONSTRUCTOR
6602 && TREE_CONSTANT (arg)))
6603 return integer_one_node;
6604 if (TREE_CODE (arg) == ADDR_EXPR)
6606 tree op = TREE_OPERAND (arg, 0);
6607 if (TREE_CODE (op) == STRING_CST
6608 || (TREE_CODE (op) == ARRAY_REF
6609 && integer_zerop (TREE_OPERAND (op, 1))
6610 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6611 return integer_one_node;
6614 /* If this expression has side effects, show we don't know it to be a
6615 constant. Likewise if it's a pointer or aggregate type since in
6616 those case we only want literals, since those are only optimized
6617 when generating RTL, not later.
6618 And finally, if we are compiling an initializer, not code, we
6619 need to return a definite result now; there's not going to be any
6620 more optimization done. */
6621 if (TREE_SIDE_EFFECTS (arg)
6622 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6623 || POINTER_TYPE_P (TREE_TYPE (arg))
6625 || folding_initializer)
6626 return integer_zero_node;
6631 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6632 return it as a truthvalue.  The result is the boolean expression
     __builtin_expect (PRED, EXPECTED) != 0, with both arguments
     converted to the parameter types of the builtin's declaration.  */
6635 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6637 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
/* Pull the parameter and return types from the BUILT_IN_EXPECT decl
   so the converted arguments match its signature exactly.  */
6639 fn = built_in_decls[BUILT_IN_EXPECT];
6640 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6641 ret_type = TREE_TYPE (TREE_TYPE (fn));
6642 pred_type = TREE_VALUE (arg_types);
6643 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6645 pred = fold_convert_loc (loc, pred_type, pred);
6646 expected = fold_convert_loc (loc, expected_type, expected);
6647 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
/* Compare the call result against zero to yield a truthvalue.  */
6649 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6650 build_int_cst (ret_type, 0));
6653 /* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
6654 NULL_TREE if no simplification is possible. */
6657 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6660 enum tree_code code;
6662 /* If this is a builtin_expect within a builtin_expect keep the
6663 inner one. See through a comparison against a constant. It
6664 might have been added to create a truthvalue. */
6666 if (COMPARISON_CLASS_P (inner)
6667 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6668 inner = TREE_OPERAND (inner, 0);
/* __builtin_expect (__builtin_expect (x, a), b) keeps the inner call.  */
6670 if (TREE_CODE (inner) == CALL_EXPR
6671 && (fndecl = get_callee_fndecl (inner))
6672 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6673 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6676 /* Distribute the expected value over short-circuiting operators.
6677 See through the cast from truthvalue_type_node to long. */
6679 while (TREE_CODE (inner) == NOP_EXPR
6680 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6681 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6682 inner = TREE_OPERAND (inner, 0);
/* For a && b or a || b, rewrite as
   expect(a, ARG1) <op> expect(b, ARG1) so each operand carries the
   expectation individually.  */
6684 code = TREE_CODE (inner);
6685 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6687 tree op0 = TREE_OPERAND (inner, 0);
6688 tree op1 = TREE_OPERAND (inner, 1);
6690 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6691 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6692 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6694 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6697 /* If the argument isn't invariant then there's nothing else we can do. */
6698 if (!TREE_CONSTANT (arg0))
6701 /* If we expect that a comparison against the argument will fold to
6702 a constant return the constant. In practice, this means a true
6703 constant or the address of a non-weak symbol. */
6706 if (TREE_CODE (inner) == ADDR_EXPR)
/* Strip COMPONENT_REF/ARRAY_REF wrappers to find the underlying decl;
   a weak symbol's address is not a compile-time constant.  */
6710 inner = TREE_OPERAND (inner, 0);
6712 while (TREE_CODE (inner) == COMPONENT_REF
6713 || TREE_CODE (inner) == ARRAY_REF);
6714 if ((TREE_CODE (inner) == VAR_DECL
6715 || TREE_CODE (inner) == FUNCTION_DECL)
6716 && DECL_WEAK (inner))
6720 /* Otherwise, ARG0 already has the proper type for the return value. */
6724 /* Fold a call to __builtin_classify_type with argument ARG.
     With no argument the result is no_type_class; otherwise the
     type class of ARG's type as computed by type_to_class.  */
6727 fold_builtin_classify_type (tree arg)
6730 return build_int_cst (NULL_TREE, no_type_class);
6732 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6735 /* Fold a call to __builtin_strlen with argument ARG.  TYPE is the
     return type of the call.  Folds to a constant when c_strlen can
     compute the length of ARG at compile time.  */
6738 fold_builtin_strlen (location_t loc, tree type, tree arg)
6740 if (!validate_arg (arg, POINTER_TYPE))
6744 tree len = c_strlen (arg, 0);
6747 return fold_convert_loc (loc, type, len);
6753 /* Fold a call to __builtin_inf or __builtin_huge_val.  TYPE is the
     return type; WARN is nonzero for the inf variants, which must
     diagnose targets whose format lacks infinities (huge_val never
     warns).  */
6756 fold_builtin_inf (location_t loc, tree type, int warn)
6758 REAL_VALUE_TYPE real;
6760 /* __builtin_inff is intended to be usable to define INFINITY on all
6761 targets. If an infinity is not available, INFINITY expands "to a
6762 positive constant of type float that overflows at translation
6763 time", footnote "In this case, using INFINITY will violate the
6764 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6765 Thus we pedwarn to ensure this constraint violation is
6767 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6768 pedwarn (loc, 0, "target format does not support infinity");
6771 return build_real (type, real);
6774 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG.
     TYPE is the return type; QUIET is nonzero for the quiet-NaN
     variant.  ARG must be a string constant parseable by real_nan,
     otherwise no folding is done.  */
6777 fold_builtin_nan (tree arg, tree type, int quiet)
6779 REAL_VALUE_TYPE real;
6782 if (!validate_arg (arg, POINTER_TYPE))
/* The tag string must be known at compile time.  */
6784 str = c_getstr (arg);
6788 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6791 return build_real (type, real);
6794 /* Return true if the floating point expression T has an integer value.
6795 We also allow +Inf, -Inf and NaN to be considered integer values.
     Recurses structurally over T: an operation yields an integer when
     its relevant operands do.  */
6798 integer_valued_real_p (tree t)
6800 switch (TREE_CODE (t))
6807 return integer_valued_real_p (TREE_OPERAND (t, 0));
6812 return integer_valued_real_p (TREE_OPERAND (t, 1));
/* Binary arithmetic: both operands must be integer valued.  */
6819 return integer_valued_real_p (TREE_OPERAND (t, 0))
6820 && integer_valued_real_p (TREE_OPERAND (t, 1));
/* Conditional: both selectable arms must be integer valued.  */
6823 return integer_valued_real_p (TREE_OPERAND (t, 1))
6824 && integer_valued_real_p (TREE_OPERAND (t, 2));
6827 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
/* A conversion from an integer type is trivially integer valued; a
   float-to-float conversion preserves the property of its operand.  */
6831 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6832 if (TREE_CODE (type) == INTEGER_TYPE)
6834 if (TREE_CODE (type) == REAL_TYPE)
6835 return integer_valued_real_p (TREE_OPERAND (t, 0));
/* Rounding builtins always produce integer values; fmin/fmax do when
   both of their arguments do.  */
6840 switch (builtin_mathfn_code (t))
6842 CASE_FLT_FN (BUILT_IN_CEIL):
6843 CASE_FLT_FN (BUILT_IN_FLOOR):
6844 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6845 CASE_FLT_FN (BUILT_IN_RINT):
6846 CASE_FLT_FN (BUILT_IN_ROUND):
6847 CASE_FLT_FN (BUILT_IN_TRUNC):
6850 CASE_FLT_FN (BUILT_IN_FMIN):
6851 CASE_FLT_FN (BUILT_IN_FMAX):
6852 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6853 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6866 /* FNDECL is assumed to be a builtin where truncation can be propagated
6867 across (for instance floor((double)f) == (double)floorf (f).
6868 Do the transformation for a call with argument ARG.  Returns the
     folded tree or nothing when no simplification applies.  */
6871 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6873 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl)
6875 if (!validate_arg (arg, REAL_TYPE))
6878 /* Integer rounding functions are idempotent. */
6879 if (fcode == builtin_mathfn_code (arg))
6882 /* If argument is already integer valued, and we don't need to worry
6883 about setting errno, there's no need to perform rounding. */
6884 if (! flag_errno_math && integer_valued_real_p (arg))
/* If ARG is a widened float (e.g. (double)f), call the narrower
   builtin variant on the unwidened value and widen the result.  */
6889 tree arg0 = strip_float_extensions (arg);
6890 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6891 tree newtype = TREE_TYPE (arg0);
6894 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6895 && (decl = mathfn_built_in (newtype, fcode)))
6896 return fold_convert_loc (loc, ftype,
6897 build_call_expr_loc (loc, decl, 1,
6898 fold_convert_loc (loc,
6905 /* FNDECL is assumed to be builtin which can narrow the FP type of
6906 the argument, for instance lround((double)f) -> lroundf (f).
6907 Do the transformation for a call with argument ARG.  */
6910 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6912 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl)
6914 if (!validate_arg (arg, REAL_TYPE))
6917 /* If argument is already integer valued, and we don't need to worry
6918 about setting errno, there's no need to perform rounding. */
6919 if (! flag_errno_math && integer_valued_real_p (arg))
6920 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6921 TREE_TYPE (TREE_TYPE (fndecl)), arg);
/* Strip a widening float conversion and call the narrower builtin
   variant directly on the unwidened argument.  */
6925 tree ftype = TREE_TYPE (arg);
6926 tree arg0 = strip_float_extensions (arg);
6927 tree newtype = TREE_TYPE (arg0);
6930 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6931 && (decl = mathfn_built_in (newtype, fcode)))
6932 return build_call_expr_loc (loc, decl, 1,
6933 fold_convert_loc (loc, newtype, arg0));
6936 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6937 sizeof (long long) == sizeof (long). */
6938 if (TYPE_PRECISION (long_long_integer_type_node)
6939 == TYPE_PRECISION (long_integer_type_node))
6941 tree newfn = NULL_TREE;
6944 CASE_FLT_FN (BUILT_IN_LLCEIL):
6945 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6948 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6949 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6952 CASE_FLT_FN (BUILT_IN_LLROUND):
6953 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6956 CASE_FLT_FN (BUILT_IN_LLRINT):
6957 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
/* Convert the long result back to the original (long long) type.  */
6966 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6967 return fold_convert_loc (loc,
6968 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6975 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6976 return type. Return NULL_TREE if no simplification can be made. */
6979 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6983 if (!validate_arg (arg, COMPLEX_TYPE)
6984 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6987 /* Calculate the result when the argument is a constant. */
6988 if (TREE_CODE (arg) == COMPLEX_CST
6989 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6993 if (TREE_CODE (arg) == COMPLEX_EXPR)
6995 tree real = TREE_OPERAND (arg, 0);
6996 tree imag = TREE_OPERAND (arg, 1);
6998 /* If either part is zero, cabs is fabs of the other. */
6999 if (real_zerop (real))
7000 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7001 if (real_zerop (imag))
7002 return fold_build1_loc (loc, ABS_EXPR, type, real);
7004 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7005 if (flag_unsafe_math_optimizations
7006 && operand_equal_p (real, imag, OEP_PURE_SAME))
7008 const REAL_VALUE_TYPE sqrt2_trunc
7009 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7011 return fold_build2_loc (loc, MULT_EXPR, type,
7012 fold_build1_loc (loc, ABS_EXPR, type, real),
7013 build_real (type, sqrt2_trunc));
7017 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7018 if (TREE_CODE (arg) == NEGATE_EXPR
7019 || TREE_CODE (arg) == CONJ_EXPR)
7020 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7022 /* Don't do this when optimizing for size. */
7023 if (flag_unsafe_math_optimizations
7024 && optimize && optimize_function_for_speed_p (cfun))
/* Expand cabs(z) inline as sqrt(re*re + im*im); only valid under
   -funsafe-math-optimizations since it skips overflow protection.  */
7026 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7028 if (sqrtfn != NULL_TREE)
7030 tree rpart, ipart, result;
/* Save the operands so each is evaluated exactly once despite being
   used twice in the expansion.  */
7032 arg = builtin_save_expr (arg);
7034 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7035 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7037 rpart = builtin_save_expr (rpart);
7038 ipart = builtin_save_expr (ipart);
7040 result = fold_build2_loc (loc, PLUS_EXPR, type,
7041 fold_build2_loc (loc, MULT_EXPR, type,
7043 fold_build2_loc (loc, MULT_EXPR, type,
7046 return build_call_expr_loc (loc, sqrtfn, 1, result);
7053 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7054 complex tree type of the result. If NEG is true, the imaginary
7055 zero is negative. */
7058 build_complex_cproj (tree type, bool neg)
7060 REAL_VALUE_TYPE rinf, rzero = dconst0;
/* TREE_TYPE (type) is the component (real) type of the complex TYPE.  */
7064 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7065 build_real (TREE_TYPE (type), rzero));
7068 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7069 return type. Return NULL_TREE if no simplification can be made. */
7072 fold_builtin_cproj (location_t loc, tree arg, tree type)
7074 if (!validate_arg (arg, COMPLEX_TYPE)
7075 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7078 /* If there are no infinities, return arg.  cproj is the identity
     on every finite value, so without infinities it folds away.  */
7079 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7080 return non_lvalue_loc (loc, arg);
7082 /* Calculate the result when the argument is a constant. */
7083 if (TREE_CODE (arg) == COMPLEX_CST)
7085 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7086 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
/* Any infinite component projects to (inf + copysign(0, imag)*i);
   the sign of the imaginary zero follows the imaginary part.  */
7088 if (real_isinf (real) || real_isinf (imag))
7089 return build_complex_cproj (type, imag->sign);
7093 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7095 tree real = TREE_OPERAND (arg, 0);
7096 tree imag = TREE_OPERAND (arg, 1);
7101 /* If the real part is inf and the imag part is known to be
7102 nonnegative, return (inf + 0i). Remember side-effects are
7103 possible in the imag part. */
7104 if (TREE_CODE (real) == REAL_CST
7105 && real_isinf (TREE_REAL_CST_PTR (real))
7106 && tree_expr_nonnegative_p (imag))
7107 return omit_one_operand_loc (loc, type,
7108 build_complex_cproj (type, false),
7111 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7112 Remember side-effects are possible in the real part. */
7113 if (TREE_CODE (imag) == REAL_CST
7114 && real_isinf (TREE_REAL_CST_PTR (imag)))
7116 omit_one_operand_loc (loc, type,
7117 build_complex_cproj (type, TREE_REAL_CST_PTR
7118 (imag)->sign), arg);
7124 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7125 Return NULL_TREE if no simplification can be made. */
7128 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7131 enum built_in_function fcode;
7134 if (!validate_arg (arg, REAL_TYPE))
7137 /* Calculate the result when the argument is a constant.  The &dconst0
     lower bound makes do_mpfr_arg1 refuse negative constants, where
     sqrt would set errno/raise invalid.  */
7138 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7141 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7142 fcode = builtin_mathfn_code (arg);
7143 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7145 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7146 arg = fold_build2_loc (loc, MULT_EXPR, type,
7147 CALL_EXPR_ARG (arg, 0),
7148 build_real (type, dconsthalf));
7149 return build_call_expr_loc (loc, expfn, 1, arg);
7152 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7153 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7155 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7159 tree arg0 = CALL_EXPR_ARG (arg, 0);
7161 /* The inner root was either sqrt or cbrt. */
7162 /* This was a conditional expression but it triggered a bug
7164 REAL_VALUE_TYPE dconstroot;
7165 if (BUILTIN_SQRT_P (fcode))
7166 dconstroot = dconsthalf;
7168 dconstroot = dconst_third ();
7170 /* Adjust for the outer root.  Halving the exponent is done by
     decrementing the binary exponent of the REAL_VALUE_TYPE.  */
7171 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7172 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7173 tree_root = build_real (type, dconstroot);
7174 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7178 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7179 if (flag_unsafe_math_optimizations
7180 && (fcode == BUILT_IN_POW
7181 || fcode == BUILT_IN_POWF
7182 || fcode == BUILT_IN_POWL))
7184 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7185 tree arg0 = CALL_EXPR_ARG (arg, 0);
7186 tree arg1 = CALL_EXPR_ARG (arg, 1);
/* |x| is needed because pow(x,y) for negative x and even y is
   positive, while pow(x, y*0.5) would be a NaN.  */
7188 if (!tree_expr_nonnegative_p (arg0))
7189 arg0 = build1 (ABS_EXPR, type, arg0);
7190 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7191 build_real (type, dconsthalf));
7192 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7198 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7199 Return NULL_TREE if no simplification can be made. */
7202 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7204 const enum built_in_function fcode = builtin_mathfn_code (arg);
7207 if (!validate_arg (arg, REAL_TYPE))
7210 /* Calculate the result when the argument is a constant. */
7211 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7214 if (flag_unsafe_math_optimizations
7216 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7217 if (BUILTIN_EXPONENT_P (fcode))
7219 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7220 const REAL_VALUE_TYPE third_trunc =
7221 real_value_truncate (TYPE_MODE (type), dconst_third ());
7222 arg = fold_build2_loc (loc, MULT_EXPR, type,
7223 CALL_EXPR_ARG (arg, 0),
7224 build_real (type, third_trunc));
7225 return build_call_expr_loc (loc, expfn, 1, arg);
7228 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7229 if (BUILTIN_SQRT_P (fcode))
7231 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7235 tree arg0 = CALL_EXPR_ARG (arg, 0);
/* 1/6 = (1/3) / 2: halve 1/3 by decrementing its binary exponent.  */
7237 REAL_VALUE_TYPE dconstroot = dconst_third ();
7239 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7240 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7241 tree_root = build_real (type, dconstroot);
7242 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7246 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7247 if (BUILTIN_CBRT_P (fcode))
7249 tree arg0 = CALL_EXPR_ARG (arg, 0);
7250 if (tree_expr_nonnegative_p (arg0))
7252 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
/* 1/9 is computed as (1/3) * (1/3) in the target's precision.  */
7257 REAL_VALUE_TYPE dconstroot;
7259 real_arithmetic (&dconstroot, MULT_EXPR,
7260 dconst_third_ptr (), dconst_third_ptr ());
7261 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7262 tree_root = build_real (type, dconstroot);
7263 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7268 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7269 if (fcode == BUILT_IN_POW
7270 || fcode == BUILT_IN_POWF
7271 || fcode == BUILT_IN_POWL)
7273 tree arg00 = CALL_EXPR_ARG (arg, 0);
7274 tree arg01 = CALL_EXPR_ARG (arg, 1);
7275 if (tree_expr_nonnegative_p (arg00))
7277 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7278 const REAL_VALUE_TYPE dconstroot
7279 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7280 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7281 build_real (type, dconstroot));
7282 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7289 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7290 TYPE is the type of the return value. Return NULL_TREE if no
7291 simplification can be made. */
7294 fold_builtin_cos (location_t loc,
7295 tree arg, tree type, tree fndecl)
7299 if (!validate_arg (arg, REAL_TYPE))
7302 /* Calculate the result when the argument is a constant. */
7303 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7306 /* Optimize cos(-x) into cos (x), since cosine is an even function.  */
7307 if ((narg = fold_strip_sign_ops (arg)))
7308 return build_call_expr_loc (loc, fndecl, 1, narg);
7313 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7314 Return NULL_TREE if no simplification can be made.  TYPE is the
     return type; FNDECL is the called function's declaration.  */
7317 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7319 if (validate_arg (arg, REAL_TYPE))
7323 /* Calculate the result when the argument is a constant. */
7324 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7327 /* Optimize cosh(-x) into cosh (x), since cosh is an even function.  */
7328 if ((narg = fold_strip_sign_ops (arg)))
7329 return build_call_expr_loc (loc, fndecl, 1, narg);
7335 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7336 argument ARG. TYPE is the type of the return value. Return
7337 NULL_TREE if no simplification can be made. */
7340 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7343 if (validate_arg (arg, COMPLEX_TYPE)
7344 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7348 /* Calculate the result when the argument is a constant. */
7349 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7352 /* Optimize fn(-x) into fn(x): both ccos and ccosh are even.  */
7353 if ((tmp = fold_strip_sign_ops (arg)))
7354 return build_call_expr_loc (loc, fndecl, 1, tmp);
7360 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7361 Return NULL_TREE if no simplification can be made.  TYPE is the
     return type.  */
7364 fold_builtin_tan (tree arg, tree type)
7366 enum built_in_function fcode;
7369 if (!validate_arg (arg, REAL_TYPE))
7372 /* Calculate the result when the argument is a constant. */
7373 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7376 /* Optimize tan(atan(x)) = x.  Only valid under unsafe math since it
     ignores rounding of the intermediate atan result.  */
7377 fcode = builtin_mathfn_code (arg);
7378 if (flag_unsafe_math_optimizations
7379 && (fcode == BUILT_IN_ATAN
7380 || fcode == BUILT_IN_ATANF
7381 || fcode == BUILT_IN_ATANL))
7382 return CALL_EXPR_ARG (arg, 0);
7387 /* Fold function call to builtin sincos, sincosf, or sincosl.  Return
7388 NULL_TREE if no simplification can be made.  ARG0 is the angle;
     ARG1 and ARG2 are pointers receiving sin and cos respectively.  */
7391 fold_builtin_sincos (location_t loc,
7392 tree arg0, tree arg1, tree arg2)
7397 if (!validate_arg (arg0, REAL_TYPE)
7398 || !validate_arg (arg1, POINTER_TYPE)
7399 || !validate_arg (arg2, POINTER_TYPE))
7402 type = TREE_TYPE (arg0);
7404 /* Calculate the result when the argument is a constant. */
7405 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7408 /* Canonicalize sincos to cexpi. */
7409 if (!TARGET_C99_FUNCTIONS)
7411 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
/* cexpi(x) = cos(x) + i*sin(x); save the call so both part extractions
   reuse one evaluation, then store imag part to *arg1 (sin) and real
   part to *arg2 (cos).  */
7415 call = build_call_expr_loc (loc, fn, 1, arg0);
7416 call = builtin_save_expr (call);
7418 return build2 (COMPOUND_EXPR, void_type_node,
7419 build2 (MODIFY_EXPR, void_type_node,
7420 build_fold_indirect_ref_loc (loc, arg1),
7421 build1 (IMAGPART_EXPR, type, call)),
7422 build2 (MODIFY_EXPR, void_type_node,
7423 build_fold_indirect_ref_loc (loc, arg2),
7424 build1 (REALPART_EXPR, type, call)));
7427 /* Fold function call to builtin cexp, cexpf, or cexpl.  Return
7428 NULL_TREE if no simplification can be made.  TYPE is the complex
     return type.  */
7431 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7434 tree realp, imagp, ifn;
7437 if (!validate_arg (arg0, COMPLEX_TYPE)
7438 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7441 /* Calculate the result when the argument is a constant. */
7442 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
/* RTYPE is the component (real) type of the complex argument.  */
7445 rtype = TREE_TYPE (TREE_TYPE (arg0));
7447 /* In case we can figure out the real part of arg0 and it is constant zero,
     cexp (0 + I*i) reduces to cexpi (i).  */
7449 if (!TARGET_C99_FUNCTIONS)
7451 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7455 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7456 && real_zerop (realp))
7458 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7459 return build_call_expr_loc (loc, ifn, 1, narg);
7462 /* In case we can easily decompose real and imaginary parts split cexp
7463 to exp (r) * cexpi (i). */
7464 if (flag_unsafe_math_optimizations
7467 tree rfn, rcall, icall;
7469 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7473 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
/* Save both calls so each is evaluated only once when building the
   two-component result below.  */
7477 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7478 icall = builtin_save_expr (icall);
7479 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7480 rcall = builtin_save_expr (rcall);
7481 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7482 fold_build2_loc (loc, MULT_EXPR, rtype,
7484 fold_build1_loc (loc, REALPART_EXPR,
7486 fold_build2_loc (loc, MULT_EXPR, rtype,
7488 fold_build1_loc (loc, IMAGPART_EXPR,
7495 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7496 Return NULL_TREE if no simplification can be made. */
7499 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7501 if (!validate_arg (arg, REAL_TYPE))
7504 /* Optimize trunc of constant value. */
7505 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7507 REAL_VALUE_TYPE r, x;
7508 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7510 x = TREE_REAL_CST (arg);
7511 real_trunc (&r, TYPE_MODE (type), &x);
7512 return build_real (type, r);
/* Otherwise fall back to the generic transformations (idempotence,
   narrowing through float extensions).  */
7515 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7518 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7519 Return NULL_TREE if no simplification can be made. */
7522 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7524 if (!validate_arg (arg, REAL_TYPE))
7527 /* Optimize floor of constant value. */
7528 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7532 x = TREE_REAL_CST (arg);
/* Folding a NaN would lose the errno/exception behavior, so only do
   it when errno math is off or the value is not a NaN.  */
7533 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7535 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7538 real_floor (&r, TYPE_MODE (type), &x);
7539 return build_real (type, r);
7543 /* Fold floor (x) where x is nonnegative to trunc (x).  They agree
     for x >= 0, and trunc may be cheaper on the target.  */
7544 if (tree_expr_nonnegative_p (arg))
7546 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7548 return build_call_expr_loc (loc, truncfn, 1, arg);
7551 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7554 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7555 Return NULL_TREE if no simplification can be made. */
7558 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7560 if (!validate_arg (arg, REAL_TYPE))
7563 /* Optimize ceil of constant value. */
7564 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7568 x = TREE_REAL_CST (arg);
/* Preserve NaN handling when errno math is enabled.  */
7569 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7571 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7574 real_ceil (&r, TYPE_MODE (type), &x);
7575 return build_real (type, r);
7579 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7582 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7583 Return NULL_TREE if no simplification can be made. */
7586 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7588 if (!validate_arg (arg, REAL_TYPE))
7591 /* Optimize round of constant value. */
7592 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7596 x = TREE_REAL_CST (arg);
/* Preserve NaN handling when errno math is enabled.  */
7597 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7599 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7602 real_round (&r, TYPE_MODE (type), &x);
7603 return build_real (type, r);
7607 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7610 /* Fold function call to builtin lround, lroundf or lroundl (or the
7611 corresponding long long versions) and other rounding functions. ARG
7612 is the argument to the call. Return NULL_TREE if no simplification
7616 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7618 if (!validate_arg (arg, REAL_TYPE))
7621 /* Optimize lround of constant value. */
7622 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7624 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
/* Inf and NaN have no meaningful integer result; leave them alone.  */
7626 if (real_isfinite (&x))
7628 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7629 tree ftype = TREE_TYPE (arg);
7630 unsigned HOST_WIDE_INT lo2;
7631 HOST_WIDE_INT hi, lo;
/* Round according to which family of builtin was called.  */
7634 switch (DECL_FUNCTION_CODE (fndecl))
7636 CASE_FLT_FN (BUILT_IN_LFLOOR):
7637 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7638 real_floor (&r, TYPE_MODE (ftype), &x);
7641 CASE_FLT_FN (BUILT_IN_LCEIL):
7642 CASE_FLT_FN (BUILT_IN_LLCEIL):
7643 real_ceil (&r, TYPE_MODE (ftype), &x);
7646 CASE_FLT_FN (BUILT_IN_LROUND):
7647 CASE_FLT_FN (BUILT_IN_LLROUND):
7648 real_round (&r, TYPE_MODE (ftype), &x);
/* Only fold when the rounded value fits the integer return type.  */
7655 REAL_VALUE_TO_INT (&lo, &hi, r);
7656 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7657 return build_int_cst_wide (itype, lo2, hi);
7661 switch (DECL_FUNCTION_CODE (fndecl))
7663 CASE_FLT_FN (BUILT_IN_LFLOOR):
7664 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7665 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7666 if (tree_expr_nonnegative_p (arg))
7667 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7668 TREE_TYPE (TREE_TYPE (fndecl)), arg);
/* Fall back to the narrowing/canonicalizing transformations.  */
7673 return fold_fixed_mathfn (loc, fndecl, arg);
7676 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7677 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7678 the argument to the call. Return NULL_TREE if no simplification can
7682 fold_builtin_bitop (tree fndecl, tree arg)
7684 if (!validate_arg (arg, INTEGER_TYPE))
7687 /* Optimize for constant argument.  The constant is held as a
     (HI, LO) pair of HOST_WIDE_INTs.  */
7688 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7690 HOST_WIDE_INT hi, width, result;
7691 unsigned HOST_WIDE_INT lo;
7694 type = TREE_TYPE (arg);
7695 width = TYPE_PRECISION (type);
7696 lo = TREE_INT_CST_LOW (arg);
7698 /* Clear all the bits that are beyond the type's precision. */
7699 if (width > HOST_BITS_PER_WIDE_INT)
7701 hi = TREE_INT_CST_HIGH (arg);
7702 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7703 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7708 if (width < HOST_BITS_PER_WIDE_INT)
7709 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7712 switch (DECL_FUNCTION_CODE (fndecl))
7714 CASE_INT_FN (BUILT_IN_FFS):
/* ffs: position of the least significant set bit, 1-based.
   lo & -lo isolates that bit; exact_log2 gives its index.  */
7716 result = exact_log2 (lo & -lo) + 1;
7718 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7723 CASE_INT_FN (BUILT_IN_CLZ):
7725 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7727 result = width - floor_log2 (lo) - 1;
/* clz(0) is only foldable if the target defines a value for it.  */
7728 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7732 CASE_INT_FN (BUILT_IN_CTZ):
7734 result = exact_log2 (lo & -lo);
7736 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7737 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7741 CASE_INT_FN (BUILT_IN_POPCOUNT):
/* Kernighan's trick: each lo &= lo - 1 clears the lowest set bit.  */
7744 result++, lo &= lo - 1;
7746 result++, hi &= hi - 1;
7749 CASE_INT_FN (BUILT_IN_PARITY):
7752 result++, lo &= lo - 1;
7754 result++, hi &= hi - 1;
7762 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7768 /* Fold function call to builtin_bswap and the long and long long
7769 variants. Return NULL_TREE if no simplification can be made. */
7771 fold_builtin_bswap (tree fndecl, tree arg)
7773 if (! validate_arg (arg, INTEGER_TYPE))
7776 /* Optimize constant value. */
7777 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7779 HOST_WIDE_INT hi, width, r_hi = 0;
7780 unsigned HOST_WIDE_INT lo, r_lo = 0;
7783 type = TREE_TYPE (arg);
7784 width = TYPE_PRECISION (type);
7785 lo = TREE_INT_CST_LOW (arg);
7786 hi = TREE_INT_CST_HIGH (arg);
7788 switch (DECL_FUNCTION_CODE (fndecl))
7790 case BUILT_IN_BSWAP32:
7791 case BUILT_IN_BSWAP64:
/* Move each byte at bit offset S to the mirrored offset D, reading
   from and writing to the (HI, LO) word pair as needed.  */
7795 for (s = 0; s < width; s += 8)
7797 int d = width - s - 8;
7798 unsigned HOST_WIDE_INT byte;
7800 if (s < HOST_BITS_PER_WIDE_INT)
7801 byte = (lo >> s) & 0xff;
7803 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7805 if (d < HOST_BITS_PER_WIDE_INT)
7808 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
/* A result narrower than a HOST_WIDE_INT fits entirely in R_LO.  */
7818 if (width < HOST_BITS_PER_WIDE_INT)
7819 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7821 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7827 /* A subroutine of fold_builtin to fold the various logarithmic
7828 functions. Return NULL_TREE if no simplification can me made.
7829 FUNC is the corresponding MPFR logarithm function (mpfr_log,
     mpfr_log2 or mpfr_log10), also used to identify which log
     builtin is being folded.  */
7832 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7833 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7835 if (validate_arg (arg, REAL_TYPE))
7837 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7839 const enum built_in_function fcode = builtin_mathfn_code (arg);
7841 /* Calculate the result when the argument is a constant.  The
     &dconst0 lower bound excludes nonpositive arguments, where the
     log functions have errno/exception semantics.  */
7842 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7845 /* Special case, optimize logN(expN(x)) = x. */
7846 if (flag_unsafe_math_optimizations
7847 && ((func == mpfr_log
7848 && (fcode == BUILT_IN_EXP
7849 || fcode == BUILT_IN_EXPF
7850 || fcode == BUILT_IN_EXPL))
7851 || (func == mpfr_log2
7852 && (fcode == BUILT_IN_EXP2
7853 || fcode == BUILT_IN_EXP2F
7854 || fcode == BUILT_IN_EXP2L))
7855 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7856 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7858 /* Optimize logN(func()) for various exponential functions. We
7859 want to determine the value "x" and the power "exponent" in
7860 order to transform logN(x**exponent) into exponent*logN(x). */
7861 if (flag_unsafe_math_optimizations)
7863 tree exponent = 0, x = 0;
7867 CASE_FLT_FN (BUILT_IN_EXP):
7868 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7869 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7871 exponent = CALL_EXPR_ARG (arg, 0);
7873 CASE_FLT_FN (BUILT_IN_EXP2):
7874 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7875 x = build_real (type, dconst2);
7876 exponent = CALL_EXPR_ARG (arg, 0);
7878 CASE_FLT_FN (BUILT_IN_EXP10):
7879 CASE_FLT_FN (BUILT_IN_POW10):
7880 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7882 REAL_VALUE_TYPE dconst10;
7883 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7884 x = build_real (type, dconst10);
7886 exponent = CALL_EXPR_ARG (arg, 0);
7888 CASE_FLT_FN (BUILT_IN_SQRT):
7889 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7890 x = CALL_EXPR_ARG (arg, 0);
7891 exponent = build_real (type, dconsthalf);
7893 CASE_FLT_FN (BUILT_IN_CBRT):
7894 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7895 x = CALL_EXPR_ARG (arg, 0);
7896 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7899 CASE_FLT_FN (BUILT_IN_POW):
7900 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7901 x = CALL_EXPR_ARG (arg, 0);
7902 exponent = CALL_EXPR_ARG (arg, 1);
7908 /* Now perform the optimization: build exponent * logN(x).  */
7911 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7912 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7920 /* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
7921 NULL_TREE if no simplification can be made.  TYPE is the return
     type; ARG0 and ARG1 are the two operands.  */
7924 fold_builtin_hypot (location_t loc, tree fndecl,
7925 tree arg0, tree arg1, tree type)
7927 tree res, narg0, narg1;
7929 if (!validate_arg (arg0, REAL_TYPE)
7930 || !validate_arg (arg1, REAL_TYPE))
7933 /* Calculate the result when the argument is a constant. */
7934 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7937 /* If either argument to hypot has a negate or abs, strip that off.
7938 E.g. hypot(-x,fabs(y)) -> hypot(x,y).  hypot only depends on the
     magnitudes of its operands, so sign operations are redundant.  */
7939 narg0 = fold_strip_sign_ops (arg0);
7940 narg1 = fold_strip_sign_ops (arg1);
7943 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7944 narg1 ? narg1 : arg1);
7947 /* If either argument is zero, hypot is fabs of the other. */
7948 if (real_zerop (arg0))
7949 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7950 else if (real_zerop (arg1))
7951 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7953 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7954 if (flag_unsafe_math_optimizations
7955 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7957 const REAL_VALUE_TYPE sqrt2_trunc
7958 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7959 return fold_build2_loc (loc, MULT_EXPR, type,
7960 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7961 build_real (type, sqrt2_trunc));
7968 /* Fold a builtin function call to pow, powf, or powl. Return
7969 NULL_TREE if no simplification can be made. */
/* NOTE(review): elided listing -- declarations of c, n, x, inexact and
   several braces/returns fall on missing original lines.  */
7971 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7975 if (!validate_arg (arg0, REAL_TYPE)
7976 || !validate_arg (arg1, REAL_TYPE))
7979 /* Calculate the result when the argument is a constant. */
7980 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7983 /* Optimize pow(1.0,y) = 1.0. */
/* omit_one_operand_loc keeps arg1 for its side effects while the
   result becomes the constant 1.0.  */
7984 if (real_onep (arg0))
7985 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
/* The remaining transforms need a constant, non-overflowed exponent.  */
7987 if (TREE_CODE (arg1) == REAL_CST
7988 && !TREE_OVERFLOW (arg1))
7990 REAL_VALUE_TYPE cint;
7994 c = TREE_REAL_CST (arg1);
7996 /* Optimize pow(x,0.0) = 1.0. */
7997 if (REAL_VALUES_EQUAL (c, dconst0))
7998 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8001 /* Optimize pow(x,1.0) = x. */
8002 if (REAL_VALUES_EQUAL (c, dconst1))
8005 /* Optimize pow(x,-1.0) = 1.0/x. */
8006 if (REAL_VALUES_EQUAL (c, dconstm1))
8007 return fold_build2_loc (loc, RDIV_EXPR, type,
8008 build_real (type, dconst1), arg0);
8010 /* Optimize pow(x,0.5) = sqrt(x). */
/* Unsafe: sqrt and pow differ for x = -0.0 / -Inf edge cases.  */
8011 if (flag_unsafe_math_optimizations
8012 && REAL_VALUES_EQUAL (c, dconsthalf))
8014 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8016 if (sqrtfn != NULL_TREE)
8017 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8020 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
/* 1/3 is truncated to the target mode before comparing, so only an
   exponent that is exactly the rounded 1/3 matches.  */
8021 if (flag_unsafe_math_optimizations)
8023 const REAL_VALUE_TYPE dconstroot
8024 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8026 if (REAL_VALUES_EQUAL (c, dconstroot))
8028 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8029 if (cbrtfn != NULL_TREE)
8030 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8034 /* Check for an integer exponent. */
/* real_identical on the round-tripped value confirms c is integral.  */
8035 n = real_to_integer (&c);
8036 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8037 if (real_identical (&c, &cint))
8039 /* Attempt to evaluate pow at compile-time, unless this should
8040 raise an exception. */
/* pow(0, negative) may set errno / raise, hence the dconst0 guard
   unless -fno-trapping-math -fno-math-errno.  */
8041 if (TREE_CODE (arg0) == REAL_CST
8042 && !TREE_OVERFLOW (arg0)
8044 || (!flag_trapping_math && !flag_errno_math)
8045 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8050 x = TREE_REAL_CST (arg0);
8051 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
/* An inexact compile-time result is only usable under unsafe math.  */
8052 if (flag_unsafe_math_optimizations || !inexact)
8053 return build_real (type, x);
8056 /* Strip sign ops from even integer powers. */
/* Valid because x**even == (-x)**even == fabs(x)**even.  */
8057 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8059 tree narg0 = fold_strip_sign_ops (arg0);
8061 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
/* Transforms on a builtin-call first argument (exp/sqrt/cbrt/pow).  */
8066 if (flag_unsafe_math_optimizations)
8068 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8070 /* Optimize pow(expN(x),y) = expN(x*y). */
8071 if (BUILTIN_EXPONENT_P (fcode))
8073 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8074 tree arg = CALL_EXPR_ARG (arg0, 0);
8075 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8076 return build_call_expr_loc (loc, expfn, 1, arg);
8079 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8080 if (BUILTIN_SQRT_P (fcode))
8082 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8083 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8084 build_real (type, dconsthalf));
8085 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8088 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8089 if (BUILTIN_CBRT_P (fcode))
8091 tree arg = CALL_EXPR_ARG (arg0, 0);
8092 if (tree_expr_nonnegative_p (arg))
8094 const REAL_VALUE_TYPE dconstroot
8095 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8096 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8097 build_real (type, dconstroot));
8098 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8102 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8103 if (fcode == BUILT_IN_POW
8104 || fcode == BUILT_IN_POWF
8105 || fcode == BUILT_IN_POWL)
8107 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8108 if (tree_expr_nonnegative_p (arg00))
8110 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8111 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8112 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8120 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8121 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): elided listing -- the conditions guarding the c == 0,
   c == 1 and c == -1 cases fall on missing original lines.  */
8123 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8124 tree arg0, tree arg1, tree type)
/* powi takes a real base and an *integer* exponent, unlike pow.  */
8126 if (!validate_arg (arg0, REAL_TYPE)
8127 || !validate_arg (arg1, INTEGER_TYPE))
8130 /* Optimize pow(1.0,y) = 1.0. */
8131 if (real_onep (arg0))
8132 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
/* Exponent must fit in a signed HOST_WIDE_INT for these transforms.  */
8134 if (host_integerp (arg1, 0))
8136 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8138 /* Evaluate powi at compile-time. */
/* powi never sets errno, so a constant base can always be folded.  */
8139 if (TREE_CODE (arg0) == REAL_CST
8140 && !TREE_OVERFLOW (arg0))
8143 x = TREE_REAL_CST (arg0);
8144 real_powi (&x, TYPE_MODE (type), &x, c);
8145 return build_real (type, x);
8148 /* Optimize pow(x,0) = 1.0. */
8150 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8153 /* Optimize pow(x,1) = x. */
8157 /* Optimize pow(x,-1) = 1.0/x. */
8159 return fold_build2_loc (loc, RDIV_EXPR, type,
8160 build_real (type, dconst1), arg0);
8166 /* A subroutine of fold_builtin to fold the various exponent
8167 functions. Return NULL_TREE if no simplification can be made.
8168 FUNC is the corresponding MPFR exponent function. */
/* FUNC doubles as a tag: it is compared against mpfr_exp/exp2/exp10
   below to pair each expN with its inverse logN.  */
8171 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8172 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8174 if (validate_arg (arg, REAL_TYPE))
/* Result type comes from the builtin's declared return type.  */
8176 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8179 /* Calculate the result when the argument is a constant. */
8180 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8183 /* Optimize expN(logN(x)) = x. */
/* Unsafe: the round trip is not exact and loses the NaN/negative-x
   error behavior of logN.  */
8184 if (flag_unsafe_math_optimizations)
8186 const enum built_in_function fcode = builtin_mathfn_code (arg);
8188 if ((func == mpfr_exp
8189 && (fcode == BUILT_IN_LOG
8190 || fcode == BUILT_IN_LOGF
8191 || fcode == BUILT_IN_LOGL))
8192 || (func == mpfr_exp2
8193 && (fcode == BUILT_IN_LOG2
8194 || fcode == BUILT_IN_LOG2F
8195 || fcode == BUILT_IN_LOG2L))
8196 || (func == mpfr_exp10
8197 && (fcode == BUILT_IN_LOG10
8198 || fcode == BUILT_IN_LOG10F
8199 || fcode == BUILT_IN_LOG10L)))
8200 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8207 /* Return true if VAR is a VAR_DECL or a component thereof. */
/* NOTE(review): the declaration of `inner' (presumably `tree inner =
   var;') falls on an elided line of this listing.  Strips component
   refs (COMPONENT_REF, ARRAY_REF, ...) down to the base object.  */
8210 var_decl_component_p (tree var)
8213 while (handled_component_p (inner))
8214 inner = TREE_OPERAND (inner, 0);
8215 return SSA_VAR_P (inner);
8218 /* Fold function call to builtin memset. Return
8219 NULL_TREE if no simplification can be made. */
/* NOTE(review): elided listing -- the assignment initializing `var'
   before the ADDR_EXPR check is on a missing line.  The transform
   rewrites memset of a whole scalar object into a plain store.  */
8222 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8223 tree type, bool ignore)
8225 tree var, ret, etype;
8226 unsigned HOST_WIDE_INT length, cval;
8228 if (! validate_arg (dest, POINTER_TYPE)
8229 || ! validate_arg (c, INTEGER_TYPE)
8230 || ! validate_arg (len, INTEGER_TYPE))
/* Only a constant, host-representable length can be handled.  */
8233 if (! host_integerp (len, 1))
8236 /* If the LEN parameter is zero, return DEST. */
8237 if (integer_zerop (len))
8238 return omit_one_operand_loc (loc, type, dest, c);
8240 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8245 if (TREE_CODE (var) != ADDR_EXPR)
8248 var = TREE_OPERAND (var, 0);
/* Never fold away an access to a volatile object.  */
8249 if (TREE_THIS_VOLATILE (var))
8252 etype = TREE_TYPE (var);
8253 if (TREE_CODE (etype) == ARRAY_TYPE)
8254 etype = TREE_TYPE (etype);
/* Only integral/pointer element types get the single-store rewrite.  */
8256 if (!INTEGRAL_TYPE_P (etype)
8257 && !POINTER_TYPE_P (etype))
8260 if (! var_decl_component_p (var))
/* The length must cover the element exactly and DEST must be
   sufficiently aligned for a direct store.  */
8263 length = tree_low_cst (len, 1);
8264 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8265 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8269 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8272 if (integer_zerop (c))
/* Byte-replication below assumes 8-bit bytes and <= 64-bit HWI.  */
8276 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8279 cval = tree_low_cst (c, 1);
/* Split the 62-bit shift as (<< 31) << 1 to stay defined for
   32-bit HOST_WIDE_INT hosts.  */
8283 cval |= (cval << 31) << 1;
8286 ret = build_int_cst_type (etype, cval);
8287 var = build_fold_indirect_ref_loc (loc,
8288 fold_convert_loc (loc,
8289 build_pointer_type (etype),
8291 ret = build2 (MODIFY_EXPR, etype, var, ret);
8295 return omit_one_operand_loc (loc, type, dest, ret);
8298 /* Fold function call to builtin memset. Return
8299 NULL_TREE if no simplification can be made. */
/* (Despite the copied comment above, this folds bzero, delegating to
   the memset folder with a zero fill value.)  */
8302 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8304 if (! validate_arg (dest, POINTER_TYPE)
8305 || ! validate_arg (size, INTEGER_TYPE))
8311 /* New argument list transforming bzero(ptr x, int y) to
8312 memset(ptr x, int 0, size_t y). This is done this way
8313 so that if it isn't expanded inline, we fallback to
8314 calling bzero instead of memset. */
8316 return fold_builtin_memset (loc, dest, integer_zero_node,
8317 fold_convert_loc (loc, sizetype, size),
8318 void_type_node, ignore);
8321 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8322 NULL_TREE if no simplification can be made.
8323 If ENDP is 0, return DEST (like memcpy).
8324 If ENDP is 1, return DEST+LEN (like mempcpy).
8325 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8326 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
/* NOTE(review): elided listing -- several guards, braces and the
   single-word-copy length test are on missing original lines; comments
   below annotate only what is visible.  */
8330 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8331 tree len, tree type, bool ignore, int endp)
8333 tree destvar, srcvar, expr;
8335 if (! validate_arg (dest, POINTER_TYPE)
8336 || ! validate_arg (src, POINTER_TYPE)
8337 || ! validate_arg (len, INTEGER_TYPE))
8340 /* If the LEN parameter is zero, return DEST. */
8341 if (integer_zerop (len))
8342 return omit_one_operand_loc (loc, type, dest, src);
8344 /* If SRC and DEST are the same (and not volatile), return
8345 DEST{,+LEN,+LEN-1}. */
8346 if (operand_equal_p (src, dest, 0))
/* This branch (presumably endp == 3, i.e. memmove) tries to prove
   non-overlap so the call can become memcpy.  */
8350 tree srctype, desttype;
8351 int src_align, dest_align;
8356 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8357 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8359 /* Both DEST and SRC must be pointer types.
8360 ??? This is what old code did. Is the testing for pointer types
8363 If either SRC is readonly or length is 1, we can use memcpy. */
8364 if (!dest_align || !src_align)
8366 if (readonly_data_expr (src)
8367 || (host_integerp (len, 1)
8368 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8369 >= tree_low_cst (len, 1))))
8371 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8374 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8377 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8378 if (TREE_CODE (src) == ADDR_EXPR
8379 && TREE_CODE (dest) == ADDR_EXPR)
8381 tree src_base, dest_base, fn;
8382 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8383 HOST_WIDE_INT size = -1;
8384 HOST_WIDE_INT maxsize = -1;
/* get_ref_base_and_extent yields base object plus bit offset/extent;
   offsets are converted to bytes below.  */
8386 srcvar = TREE_OPERAND (src, 0);
8387 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8389 destvar = TREE_OPERAND (dest, 0);
8390 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8392 if (host_integerp (len, 1))
8393 maxsize = tree_low_cst (len, 1);
8396 src_offset /= BITS_PER_UNIT;
8397 dest_offset /= BITS_PER_UNIT;
/* Same decl with overlapping byte ranges => must stay memmove.  */
8398 if (SSA_VAR_P (src_base)
8399 && SSA_VAR_P (dest_base))
8401 if (operand_equal_p (src_base, dest_base, 0)
8402 && ranges_overlap_p (src_offset, maxsize,
8403 dest_offset, maxsize))
8406 else if (TREE_CODE (src_base) == MEM_REF
8407 && TREE_CODE (dest_base) == MEM_REF)
/* Same-pointer MEM_REFs: fold the MEM_REF offsets into the byte
   offsets (bailing out on HOST_WIDE_INT overflow) and re-test.  */
8410 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8411 TREE_OPERAND (dest_base, 0), 0))
8413 off = double_int_add (mem_ref_offset (src_base),
8414 shwi_to_double_int (src_offset));
8415 if (!double_int_fits_in_shwi_p (off))
8417 src_offset = off.low;
8418 off = double_int_add (mem_ref_offset (dest_base),
8419 shwi_to_double_int (dest_offset));
8420 if (!double_int_fits_in_shwi_p (off))
8422 dest_offset = off.low;
8423 if (ranges_overlap_p (src_offset, maxsize,
8424 dest_offset, maxsize))
8430 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8433 return build_call_expr_loc (loc, fn, 3, dest, src, len);
/* Below: try to turn the copy into a single scalar assignment
   *destvar = *srcvar when LEN equals the accessed type's size.  */
8438 if (!host_integerp (len, 0))
8441 This logic lose for arguments like (type *)malloc (sizeof (type)),
8442 since we strip the casts of up to VOID return value from malloc.
8443 Perhaps we ought to inherit type from non-VOID argument here? */
8446 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8447 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8449 tree tem = TREE_OPERAND (src, 0);
8451 if (tem != TREE_OPERAND (src, 0))
8452 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8454 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8456 tree tem = TREE_OPERAND (dest, 0);
8458 if (tem != TREE_OPERAND (dest, 0))
8459 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
/* Peel one array level when LEN doesn't match the whole array size,
   so a copy of one element still folds.  */
8461 srctype = TREE_TYPE (TREE_TYPE (src));
8463 && TREE_CODE (srctype) == ARRAY_TYPE
8464 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8466 srctype = TREE_TYPE (srctype);
8468 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8470 desttype = TREE_TYPE (TREE_TYPE (dest));
8472 && TREE_CODE (desttype) == ARRAY_TYPE
8473 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8475 desttype = TREE_TYPE (desttype);
8477 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
/* Give up on addressable or variably-sized access types.  */
8479 if (!srctype || !desttype
8480 || TREE_ADDRESSABLE (srctype)
8481 || TREE_ADDRESSABLE (desttype)
8482 || !TYPE_SIZE_UNIT (srctype)
8483 || !TYPE_SIZE_UNIT (desttype)
8484 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8485 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST)
8488 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8489 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8490 if (dest_align < (int) TYPE_ALIGN (desttype)
8491 || src_align < (int) TYPE_ALIGN (srctype))
/* DEST is reused for the return value; guard against double
   evaluation of side effects.  */
8495 dest = builtin_save_expr (dest);
8497 /* Build accesses at offset zero with a ref-all character type. */
8498 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8499 ptr_mode, true), 0);
8502 STRIP_NOPS (destvar);
8503 if (TREE_CODE (destvar) == ADDR_EXPR
8504 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8505 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8506 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8508 destvar = NULL_TREE;
8511 STRIP_NOPS (srcvar);
8512 if (TREE_CODE (srcvar) == ADDR_EXPR
8513 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8514 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
/* Prefer the destination type for the source access when both
   sides resolved, to keep the assignment type-consistent.  */
8515 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8520 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8523 if (srcvar == NULL_TREE)
8526 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8528 else if (destvar == NULL_TREE)
8531 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8534 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
/* ENDP selects the return value: DEST, DEST+LEN, or DEST+LEN-1.  */
8540 if (endp == 0 || endp == 3)
8541 return omit_one_operand_loc (loc, type, dest, expr);
8547 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8550 len = fold_convert_loc (loc, sizetype, len);
8551 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8552 dest = fold_convert_loc (loc, type, dest);
8554 dest = omit_one_operand_loc (loc, type, dest, expr);
8558 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8559 If LEN is not NULL, it represents the length of the string to be
8560 copied. Return NULL_TREE if no simplification can be made. */
/* Transforms strcpy(d, s) into memcpy(d, s, strlen(s)+1) when the
   source length is a known, side-effect-free constant.  */
8563 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8567 if (!validate_arg (dest, POINTER_TYPE)
8568 || !validate_arg (src, POINTER_TYPE))
8571 /* If SRC and DEST are the same (and not volatile), return DEST. */
8572 if (operand_equal_p (src, dest, 0))
8573 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
/* Under -Os, skip the memcpy expansion (strcpy call is smaller).  */
8575 if (optimize_function_for_size_p (cfun))
8578 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
/* c_strlen with arg 1 presumably computes the constant string length
   of SRC -- confirm at its definition.  */
8584 len = c_strlen (src, 1);
8585 if (! len || TREE_SIDE_EFFECTS (len))
/* +1 to copy the terminating NUL as well.  */
8589 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8590 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8591 build_call_expr_loc (loc, fn, 3, dest, src, len));
8594 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8595 Return NULL_TREE if no simplification can be made. */
/* stpcpy returns DEST + strlen(SRC); the fold builds memcpy for the
   copy and a POINTER_PLUS for the return value.  */
8598 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8600 tree fn, len, lenp1, call, type;
8602 if (!validate_arg (dest, POINTER_TYPE)
8603 || !validate_arg (src, POINTER_TYPE))
8606 len = c_strlen (src, 1);
8608 || TREE_CODE (len) != INTEGER_CST)
/* Under -Os only fold the trivial zero-length case.  */
8611 if (optimize_function_for_size_p (cfun)
8612 /* If length is zero it's small enough. */
8613 && !integer_zerop (len))
8616 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
/* Copy strlen+1 bytes to include the terminating NUL.  */
8620 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8621 /* We use dest twice in building our expression. Save it from
8622 multiple expansions. */
8623 dest = builtin_save_expr (dest);
8624 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8626 type = TREE_TYPE (TREE_TYPE (fndecl));
8627 len = fold_convert_loc (loc, sizetype, len);
8628 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8629 dest = fold_convert_loc (loc, type, dest);
/* Sequence the memcpy before yielding DEST+LEN as the value.  */
8630 dest = omit_one_operand_loc (loc, type, dest, call);
8634 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8635 If SLEN is not NULL, it represents the length of the source string.
8636 Return NULL_TREE if no simplification can be made. */
8639 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8640 tree src, tree len, tree slen)
8644 if (!validate_arg (dest, POINTER_TYPE)
8645 || !validate_arg (src, POINTER_TYPE)
8646 || !validate_arg (len, INTEGER_TYPE))
8649 /* If the LEN parameter is zero, return DEST. */
8650 if (integer_zerop (len))
8651 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8653 /* We can't compare slen with len as constants below if len is not a
8655 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8659 slen = c_strlen (src, 1);
8661 /* Now, we must be passed a constant src ptr parameter. */
8662 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
/* Account for the NUL: strncpy copies min(len, strlen+1) data bytes
   and zero-fills the rest.  */
8665 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8667 /* We do not support simplification of this case, though we do
8668 support it when expanding trees into RTL. */
8669 /* FIXME: generate a call to __builtin_memset. */
/* slen < len would require zero-padding, which memcpy can't do.  */
8670 if (tree_int_cst_lt (slen, len))
8673 /* OK transform into builtin memcpy. */
8674 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8677 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8678 build_call_expr_loc (loc, fn, 3, dest, src, len));
8681 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8682 arguments to the call, and TYPE is its return type.
8683 Return NULL_TREE if no simplification can be made. */
8686 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8688 if (!validate_arg (arg1, POINTER_TYPE)
8689 || !validate_arg (arg2, INTEGER_TYPE)
8690 || !validate_arg (len, INTEGER_TYPE))
8696 if (TREE_CODE (arg2) != INTEGER_CST
8697 || !host_integerp (len, 1))
/* c_getstr presumably returns the constant string ARG1 points to, or
   NULL -- confirm at its definition.  */
8700 p1 = c_getstr (arg1);
/* Only fold when LEN stays within the literal (including its NUL), so
   the host memchr below cannot read out of bounds.  */
8701 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
/* target_char_cast converts ARG2 to a target character; nonzero
   result means it cannot be represented, so give up.  */
8707 if (target_char_cast (arg2, &c))
/* Evaluate on the host: the literal's bytes match the target's.  */
8710 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8713 return build_int_cst (TREE_TYPE (arg1), 0);
/* Found: result is ARG1 plus the byte offset of the match.  */
8715 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8717 return fold_convert_loc (loc, type, tem);
8723 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8724 Return NULL_TREE if no simplification can be made. */
8727 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8729 const char *p1, *p2;
8731 if (!validate_arg (arg1, POINTER_TYPE)
8732 || !validate_arg (arg2, POINTER_TYPE)
8733 || !validate_arg (len, INTEGER_TYPE))
8736 /* If the LEN parameter is zero, return zero. */
/* omit_two_operands keeps ARG1/ARG2 for their side effects.  */
8737 if (integer_zerop (len))
8738 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8741 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8742 if (operand_equal_p (arg1, arg2, 0))
8743 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8745 p1 = c_getstr (arg1);
8746 p2 = c_getstr (arg2);
8748 /* If all arguments are constant, and the value of len is not greater
8749 than the lengths of arg1 and arg2, evaluate at compile-time. */
8750 if (host_integerp (len, 1) && p1 && p2
8751 && compare_tree_int (len, strlen (p1) + 1) <= 0
8752 && compare_tree_int (len, strlen (p2) + 1) <= 0)
/* Normalize the host memcmp result to -1/0/1; the C standard only
   specifies the sign.  */
8754 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8757 return integer_one_node;
8759 return integer_minus_one_node;
8761 return integer_zero_node;
8764 /* If len parameter is one, return an expression corresponding to
8765 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8766 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
/* Read through a const unsigned char ref-all pointer so the compare
   is unsigned and alias-safe.  */
8768 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8769 tree cst_uchar_ptr_node
8770 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8773 = fold_convert_loc (loc, integer_type_node,
8774 build1 (INDIRECT_REF, cst_uchar_node,
8775 fold_convert_loc (loc,
8779 = fold_convert_loc (loc, integer_type_node,
8780 build1 (INDIRECT_REF, cst_uchar_node,
8781 fold_convert_loc (loc,
8784 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8790 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8791 Return NULL_TREE if no simplification can be made. */
8794 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8796 const char *p1, *p2;
8798 if (!validate_arg (arg1, POINTER_TYPE)
8799 || !validate_arg (arg2, POINTER_TYPE))
8802 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8803 if (operand_equal_p (arg1, arg2, 0))
8804 return integer_zero_node;
8806 p1 = c_getstr (arg1);
8807 p2 = c_getstr (arg2);
/* Both strings constant: evaluate on the host, normalizing the
   result's sign to -1/0/1.  */
8811 const int i = strcmp (p1, p2);
8813 return integer_minus_one_node;
8815 return integer_one_node;
8817 return integer_zero_node;
8820 /* If the second arg is "", return *(const unsigned char*)arg1. */
/* strcmp compares as unsigned char, hence the uchar ref-all loads.  */
8821 if (p2 && *p2 == '\0')
8823 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8824 tree cst_uchar_ptr_node
8825 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8827 return fold_convert_loc (loc, integer_type_node,
8828 build1 (INDIRECT_REF, cst_uchar_node,
8829 fold_convert_loc (loc,
8834 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8835 if (p1 && *p1 == '\0')
8837 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8838 tree cst_uchar_ptr_node
8839 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8842 = fold_convert_loc (loc, integer_type_node,
8843 build1 (INDIRECT_REF, cst_uchar_node,
8844 fold_convert_loc (loc,
8847 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8853 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8854 Return NULL_TREE if no simplification can be made. */
8857 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8859 const char *p1, *p2;
8861 if (!validate_arg (arg1, POINTER_TYPE)
8862 || !validate_arg (arg2, POINTER_TYPE)
8863 || !validate_arg (len, INTEGER_TYPE))
8866 /* If the LEN parameter is zero, return zero. */
8867 if (integer_zerop (len))
8868 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8871 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8872 if (operand_equal_p (arg1, arg2, 0))
8873 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8875 p1 = c_getstr (arg1);
8876 p2 = c_getstr (arg2);
/* Both strings and LEN constant: evaluate on the host; strncmp stops
   at a NUL, so no bound-vs-literal-length check is needed here.  */
8878 if (host_integerp (len, 1) && p1 && p2)
8880 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8882 return integer_one_node;
8884 return integer_minus_one_node;
8886 return integer_zero_node;
8889 /* If the second arg is "", and the length is greater than zero,
8890 return *(const unsigned char*)arg1. */
8891 if (p2 && *p2 == '\0'
8892 && TREE_CODE (len) == INTEGER_CST
8893 && tree_int_cst_sgn (len) == 1)
/* Comparison semantics are in unsigned char, hence the uchar loads.  */
8895 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8896 tree cst_uchar_ptr_node
8897 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8899 return fold_convert_loc (loc, integer_type_node,
8900 build1 (INDIRECT_REF, cst_uchar_node,
8901 fold_convert_loc (loc,
8906 /* If the first arg is "", and the length is greater than zero,
8907 return -*(const unsigned char*)arg2. */
8908 if (p1 && *p1 == '\0'
8909 && TREE_CODE (len) == INTEGER_CST
8910 && tree_int_cst_sgn (len) == 1)
8912 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8913 tree cst_uchar_ptr_node
8914 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8916 tree temp = fold_convert_loc (loc, integer_type_node,
8917 build1 (INDIRECT_REF, cst_uchar_node,
8918 fold_convert_loc (loc,
8921 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8924 /* If len parameter is one, return an expression corresponding to
8925 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8926 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8928 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8929 tree cst_uchar_ptr_node
8930 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8932 tree ind1 = fold_convert_loc (loc, integer_type_node,
8933 build1 (INDIRECT_REF, cst_uchar_node,
8934 fold_convert_loc (loc,
8937 tree ind2 = fold_convert_loc (loc, integer_type_node,
8938 build1 (INDIRECT_REF, cst_uchar_node,
8939 fold_convert_loc (loc,
8942 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8948 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8949 ARG. Return NULL_TREE if no simplification can be made. */
8952 fold_builtin_signbit (location_t loc, tree arg, tree type)
8956 if (!validate_arg (arg, REAL_TYPE))
8959 /* If ARG is a compile-time constant, determine the result. */
8960 if (TREE_CODE (arg) == REAL_CST
8961 && !TREE_OVERFLOW (arg))
8965 c = TREE_REAL_CST (arg);
8966 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8967 return fold_convert_loc (loc, type, temp);
8970 /* If ARG is non-negative, the result is always zero. */
/* ARG is still evaluated for its side effects.  */
8971 if (tree_expr_nonnegative_p (arg))
8972 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8974 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
/* With signed zeros, signbit(-0.0) is 1 but -0.0 < 0.0 is false, so
   the rewrite is only valid when the format lacks them.  */
8975 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8976 return fold_build2_loc (loc, LT_EXPR, type, arg,
8977 build_real (TREE_TYPE (arg), dconst0));
8982 /* Fold function call to builtin copysign, copysignf or copysignl with
8983 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8987 fold_builtin_copysign (location_t loc, tree fndecl,
8988 tree arg1, tree arg2, tree type)
8992 if (!validate_arg (arg1, REAL_TYPE)
8993 || !validate_arg (arg2, REAL_TYPE))
8996 /* copysign(X,X) is X. */
8997 if (operand_equal_p (arg1, arg2, 0))
8998 return fold_convert_loc (loc, type, arg1);
9000 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9001 if (TREE_CODE (arg1) == REAL_CST
9002 && TREE_CODE (arg2) == REAL_CST
9003 && !TREE_OVERFLOW (arg1)
9004 && !TREE_OVERFLOW (arg2))
9006 REAL_VALUE_TYPE c1, c2;
9008 c1 = TREE_REAL_CST (arg1);
9009 c2 = TREE_REAL_CST (arg2);
9010 /* c1.sign := c2.sign. */
9011 real_copysign (&c1, &c2);
9012 return build_real (type, c1);
9015 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9016 Remember to evaluate Y for side-effects. */
9017 if (tree_expr_nonnegative_p (arg2))
9018 return omit_one_operand_loc (loc, type,
9019 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9022 /* Strip sign changing operations for the first argument. */
/* The sign of ARG1 is discarded anyway, so negates/abs on it can go;
   fold_strip_sign_ops yields NULL_TREE when nothing was stripped.  */
9023 tem = fold_strip_sign_ops (arg1);
9025 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9030 /* Fold a call to builtin isascii with argument ARG. */
9033 fold_builtin_isascii (location_t loc, tree arg)
9035 if (!validate_arg (arg, INTEGER_TYPE))
9039 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
/* A value is ASCII iff no bit above the low 7 is set.  */
9040 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9041 build_int_cst (NULL_TREE,
9042 ~ (unsigned HOST_WIDE_INT) 0x7f));
9043 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9044 arg, integer_zero_node);
9048 /* Fold a call to builtin toascii with argument ARG. */
9051 fold_builtin_toascii (location_t loc, tree arg)
9053 if (!validate_arg (arg, INTEGER_TYPE))
9056 /* Transform toascii(c) -> (c & 0x7f). */
/* Masking to 7 bits matches the historical toascii() semantics.  */
9057 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9058 build_int_cst (NULL_TREE, 0x7f));
9061 /* Fold a call to builtin isdigit with argument ARG. */
9064 fold_builtin_isdigit (location_t loc, tree arg)
9066 if (!validate_arg (arg, INTEGER_TYPE))
9070 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9071 /* According to the C standard, isdigit is unaffected by locale.
9072 However, it definitely is affected by the target character set. */
/* Map host '0' to the target charset; the transform relies on the
   ten digits being contiguous there.  */
9073 unsigned HOST_WIDE_INT target_digit0
9074 = lang_hooks.to_target_charset ('0');
/* to_target_charset returning 0 means the mapping is unknown.  */
9076 if (target_digit0 == 0)
/* Unsigned subtract-and-compare folds both range checks into one.  */
9079 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9080 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9081 build_int_cst (unsigned_type_node, target_digit0));
9082 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9083 build_int_cst (unsigned_type_node, 9));
9087 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9090 fold_builtin_fabs (location_t loc, tree arg, tree type)
9092 if (!validate_arg (arg, REAL_TYPE))
9095 arg = fold_convert_loc (loc, type, arg);
/* Constant argument: compute |ARG| at compile time.  */
9096 if (TREE_CODE (arg) == REAL_CST)
9097 return fold_abs_const (arg, type);
/* Otherwise emit an ABS_EXPR and let generic folding refine it.  */
9098 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9101 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
/* Integer counterpart of fold_builtin_fabs above.  */
9104 fold_builtin_abs (location_t loc, tree arg, tree type)
9106 if (!validate_arg (arg, INTEGER_TYPE))
9109 arg = fold_convert_loc (loc, type, arg);
9110 if (TREE_CODE (arg) == INTEGER_CST)
9111 return fold_abs_const (arg, type);
9112 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9115 /* Fold a call to builtin fmin or fmax. */
/* MAX selects between the two builtins: true => fmax, false => fmin.  */
9118 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9119 tree type, bool max)
9121 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9123 /* Calculate the result when the argument is a constant. */
9124 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9129 /* If either argument is NaN, return the other one. Avoid the
9130 transformation if we get (and honor) a signalling NaN. Using
9131 omit_one_operand() ensures we create a non-lvalue. */
9132 if (TREE_CODE (arg0) == REAL_CST
9133 && real_isnan (&TREE_REAL_CST (arg0))
9134 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9135 || ! TREE_REAL_CST (arg0).signalling))
9136 return omit_one_operand_loc (loc, type, arg1, arg0);
9137 if (TREE_CODE (arg1) == REAL_CST
9138 && real_isnan (&TREE_REAL_CST (arg1))
9139 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9140 || ! TREE_REAL_CST (arg1).signalling))
9141 return omit_one_operand_loc (loc, type, arg0, arg1);
9143 /* Transform fmin/fmax(x,x) -> x. */
9144 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9145 return omit_one_operand_loc (loc, type, arg0, arg1);
9147 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9148 functions to return the numeric arg if the other one is NaN.
9149 These tree codes don't honor that, so only transform if
9150 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9151 handled, so we don't have to worry about it either. */
9152 if (flag_finite_math_only)
9153 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9154 fold_convert_loc (loc, type, arg0),
9155 fold_convert_loc (loc, type, arg1));
9160 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9163 fold_builtin_carg (location_t loc, tree arg, tree type)
9165 if (validate_arg (arg, COMPLEX_TYPE)
9166 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9168 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9172 tree new_arg = builtin_save_expr (arg);
9173 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9174 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9175 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9182 /* Fold a call to builtin logb/ilogb. */
9185 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9187 if (! validate_arg (arg, REAL_TYPE))
9192 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9194 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9200 /* If arg is Inf or NaN and we're logb, return it. */
9201 if (TREE_CODE (rettype) == REAL_TYPE)
9202 return fold_convert_loc (loc, rettype, arg);
9203 /* Fall through... */
9205 /* Zero may set errno and/or raise an exception for logb, also
9206 for ilogb we don't know FP_ILOGB0. */
9209 /* For normal numbers, proceed iff radix == 2. In GCC,
9210 normalized significands are in the range [0.5, 1.0). We
9211 want the exponent as if they were [1.0, 2.0) so get the
9212 exponent and subtract 1. */
9213 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9214 return fold_convert_loc (loc, rettype,
9215 build_int_cst (NULL_TREE,
9216 REAL_EXP (value)-1));
9224 /* Fold a call to builtin significand, if radix == 2. */
9227 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9229 if (! validate_arg (arg, REAL_TYPE))
9234 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9236 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9243 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9244 return fold_convert_loc (loc, rettype, arg);
9246 /* For normal numbers, proceed iff radix == 2. */
9247 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9249 REAL_VALUE_TYPE result = *value;
9250 /* In GCC, normalized significands are in the range [0.5,
9251 1.0). We want them to be [1.0, 2.0) so set the
9253 SET_REAL_EXP (&result, 1);
9254 return build_real (rettype, result);
9263 /* Fold a call to builtin frexp, we can assume the base is 2. */
9266 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9268 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9273 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9276 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9278 /* Proceed if a valid pointer type was passed in. */
9279 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9281 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9287 /* For +-0, return (*exp = 0, +-0). */
9288 exp = integer_zero_node;
9293 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9294 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9297 /* Since the frexp function always expects base 2, and in
9298 GCC normalized significands are already in the range
9299 [0.5, 1.0), we have exactly what frexp wants. */
9300 REAL_VALUE_TYPE frac_rvt = *value;
9301 SET_REAL_EXP (&frac_rvt, 0);
9302 frac = build_real (rettype, frac_rvt);
9303 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9310 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9311 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9312 TREE_SIDE_EFFECTS (arg1) = 1;
9313 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9319 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9320 then we can assume the base is two. If it's false, then we have to
9321 check the mode of the TYPE parameter in certain cases. */
9324 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9325 tree type, bool ldexp)
9327 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9332 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9333 if (real_zerop (arg0) || integer_zerop (arg1)
9334 || (TREE_CODE (arg0) == REAL_CST
9335 && !real_isfinite (&TREE_REAL_CST (arg0))))
9336 return omit_one_operand_loc (loc, type, arg0, arg1);
9338 /* If both arguments are constant, then try to evaluate it. */
9339 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9340 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9341 && host_integerp (arg1, 0))
9343 /* Bound the maximum adjustment to twice the range of the
9344 mode's valid exponents. Use abs to ensure the range is
9345 positive as a sanity check. */
9346 const long max_exp_adj = 2 *
9347 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9348 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9350 /* Get the user-requested adjustment. */
9351 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9353 /* The requested adjustment must be inside this range. This
9354 is a preliminary cap to avoid things like overflow, we
9355 may still fail to compute the result for other reasons. */
9356 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9358 REAL_VALUE_TYPE initial_result;
9360 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9362 /* Ensure we didn't overflow. */
9363 if (! real_isinf (&initial_result))
9365 const REAL_VALUE_TYPE trunc_result
9366 = real_value_truncate (TYPE_MODE (type), initial_result);
9368 /* Only proceed if the target mode can hold the
9370 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9371 return build_real (type, trunc_result);
9380 /* Fold a call to builtin modf. */
9383 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9385 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9390 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9393 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9395 /* Proceed if a valid pointer type was passed in. */
9396 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9398 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9399 REAL_VALUE_TYPE trunc, frac;
9405 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9406 trunc = frac = *value;
9409 /* For +-Inf, return (*arg1 = arg0, +-0). */
9411 frac.sign = value->sign;
9415 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9416 real_trunc (&trunc, VOIDmode, value);
9417 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9418 /* If the original number was negative and already
9419 integral, then the fractional part is -0.0. */
9420 if (value->sign && frac.cl == rvc_zero)
9421 frac.sign = value->sign;
9425 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9426 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9427 build_real (rettype, trunc));
9428 TREE_SIDE_EFFECTS (arg1) = 1;
9429 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9430 build_real (rettype, frac));
9436 /* Given a location LOC, an interclass builtin function decl FNDECL
9437 and its single argument ARG, return an folded expression computing
9438 the same, or NULL_TREE if we either couldn't or didn't want to fold
9439 (the latter happen if there's an RTL instruction available). */
9442 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9444 enum machine_mode mode;
9446 if (!validate_arg (arg, REAL_TYPE))
9449 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9452 mode = TYPE_MODE (TREE_TYPE (arg));
9454 /* If there is no optab, try generic code. */
9455 switch (DECL_FUNCTION_CODE (fndecl))
9459 CASE_FLT_FN (BUILT_IN_ISINF):
9461 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9462 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9463 tree const type = TREE_TYPE (arg);
9467 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9468 real_from_string (&r, buf);
9469 result = build_call_expr (isgr_fn, 2,
9470 fold_build1_loc (loc, ABS_EXPR, type, arg),
9471 build_real (type, r));
9474 CASE_FLT_FN (BUILT_IN_FINITE):
9475 case BUILT_IN_ISFINITE:
9477 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9478 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9479 tree const type = TREE_TYPE (arg);
9483 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9484 real_from_string (&r, buf);
9485 result = build_call_expr (isle_fn, 2,
9486 fold_build1_loc (loc, ABS_EXPR, type, arg),
9487 build_real (type, r));
9488 /*result = fold_build2_loc (loc, UNGT_EXPR,
9489 TREE_TYPE (TREE_TYPE (fndecl)),
9490 fold_build1_loc (loc, ABS_EXPR, type, arg),
9491 build_real (type, r));
9492 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9493 TREE_TYPE (TREE_TYPE (fndecl)),
9497 case BUILT_IN_ISNORMAL:
9499 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9500 islessequal(fabs(x),DBL_MAX). */
9501 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9502 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9503 tree const type = TREE_TYPE (arg);
9504 REAL_VALUE_TYPE rmax, rmin;
9507 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9508 real_from_string (&rmax, buf);
9509 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9510 real_from_string (&rmin, buf);
9511 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9512 result = build_call_expr (isle_fn, 2, arg,
9513 build_real (type, rmax));
9514 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9515 build_call_expr (isge_fn, 2, arg,
9516 build_real (type, rmin)));
9526 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9527 ARG is the argument for the call. */
9530 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9532 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9535 if (!validate_arg (arg, REAL_TYPE))
9538 switch (builtin_index)
9540 case BUILT_IN_ISINF:
9541 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9542 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9544 if (TREE_CODE (arg) == REAL_CST)
9546 r = TREE_REAL_CST (arg);
9547 if (real_isinf (&r))
9548 return real_compare (GT_EXPR, &r, &dconst0)
9549 ? integer_one_node : integer_minus_one_node;
9551 return integer_zero_node;
9556 case BUILT_IN_ISINF_SIGN:
9558 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9559 /* In a boolean context, GCC will fold the inner COND_EXPR to
9560 1. So e.g. "if (isinf_sign(x))" would be folded to just
9561 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9562 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9563 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9564 tree tmp = NULL_TREE;
9566 arg = builtin_save_expr (arg);
9568 if (signbit_fn && isinf_fn)
9570 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9571 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9573 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9574 signbit_call, integer_zero_node);
9575 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9576 isinf_call, integer_zero_node);
9578 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9579 integer_minus_one_node, integer_one_node);
9580 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9588 case BUILT_IN_ISFINITE:
9589 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9590 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9591 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9593 if (TREE_CODE (arg) == REAL_CST)
9595 r = TREE_REAL_CST (arg);
9596 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9601 case BUILT_IN_ISNAN:
9602 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9603 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9605 if (TREE_CODE (arg) == REAL_CST)
9607 r = TREE_REAL_CST (arg);
9608 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9611 arg = builtin_save_expr (arg);
9612 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9619 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9620 This builtin will generate code to return the appropriate floating
9621 point classification depending on the value of the floating point
9622 number passed in. The possible return values must be supplied as
9623 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9624 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9625 one floating point argument which is "type generic". */
9628 fold_builtin_fpclassify (location_t loc, tree exp)
9630 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9631 arg, type, res, tmp;
9632 enum machine_mode mode;
9636 /* Verify the required arguments in the original call. */
9637 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9638 INTEGER_TYPE, INTEGER_TYPE,
9639 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9642 fp_nan = CALL_EXPR_ARG (exp, 0);
9643 fp_infinite = CALL_EXPR_ARG (exp, 1);
9644 fp_normal = CALL_EXPR_ARG (exp, 2);
9645 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9646 fp_zero = CALL_EXPR_ARG (exp, 4);
9647 arg = CALL_EXPR_ARG (exp, 5);
9648 type = TREE_TYPE (arg);
9649 mode = TYPE_MODE (type);
9650 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9654 (fabs(x) == Inf ? FP_INFINITE :
9655 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9656 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9658 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9659 build_real (type, dconst0));
9660 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9661 tmp, fp_zero, fp_subnormal);
9663 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9664 real_from_string (&r, buf);
9665 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9666 arg, build_real (type, r));
9667 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9669 if (HONOR_INFINITIES (mode))
9672 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9673 build_real (type, r));
9674 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9678 if (HONOR_NANS (mode))
9680 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9681 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9687 /* Fold a call to an unordered comparison function such as
9688 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9689 being called and ARG0 and ARG1 are the arguments for the call.
9690 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9691 the opposite of the desired result. UNORDERED_CODE is used
9692 for modes that can hold NaNs and ORDERED_CODE is used for
9696 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9697 enum tree_code unordered_code,
9698 enum tree_code ordered_code)
9700 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9701 enum tree_code code;
9703 enum tree_code code0, code1;
9704 tree cmp_type = NULL_TREE;
9706 type0 = TREE_TYPE (arg0);
9707 type1 = TREE_TYPE (arg1);
9709 code0 = TREE_CODE (type0);
9710 code1 = TREE_CODE (type1);
9712 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9713 /* Choose the wider of two real types. */
9714 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9716 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9718 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9721 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9722 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9724 if (unordered_code == UNORDERED_EXPR)
9726 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9727 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9728 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9731 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9733 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9734 fold_build2_loc (loc, code, type, arg0, arg1));
9737 /* Fold a call to built-in function FNDECL with 0 arguments.
9738 IGNORE is true if the result of the function call is ignored. This
9739 function returns NULL_TREE if no simplification was possible. */
9742 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9744 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9745 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9748 CASE_FLT_FN (BUILT_IN_INF):
9749 case BUILT_IN_INFD32:
9750 case BUILT_IN_INFD64:
9751 case BUILT_IN_INFD128:
9752 return fold_builtin_inf (loc, type, true);
9754 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9755 return fold_builtin_inf (loc, type, false);
9757 case BUILT_IN_CLASSIFY_TYPE:
9758 return fold_builtin_classify_type (NULL_TREE);
9766 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9767 IGNORE is true if the result of the function call is ignored. This
9768 function returns NULL_TREE if no simplification was possible. */
/* NOTE(review): sampled excerpt — the "static tree" line, the switch (fcode)
   open/close brackets and several break/return statements are missing here
   relative to the upstream file; the tokens below are kept byte-identical.  */
9771 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9773 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9774 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9777 case BUILT_IN_CONSTANT_P:
9779 tree val = fold_builtin_constant_p (arg0);
9781 /* Gimplification will pull the CALL_EXPR for the builtin out of
9782 an if condition. When not optimizing, we'll not CSE it back.
9783 To avoid link error types of regressions, return false now. */
9784 if (!val && !optimize)
9785 val = integer_zero_node;
9790 case BUILT_IN_CLASSIFY_TYPE:
9791 return fold_builtin_classify_type (arg0);
9793 case BUILT_IN_STRLEN:
9794 return fold_builtin_strlen (loc, type, arg0);
9796 CASE_FLT_FN (BUILT_IN_FABS):
9797 return fold_builtin_fabs (loc, arg0, type);
9801 case BUILT_IN_LLABS:
9802 case BUILT_IN_IMAXABS:
9803 return fold_builtin_abs (loc, arg0, type);
/* Complex builtins: each requires a COMPLEX_TYPE argument with REAL_TYPE
   parts; constant folding of the transcendental ones goes through MPC
   via do_mpc_arg1.  */
9805 CASE_FLT_FN (BUILT_IN_CONJ):
9806 if (validate_arg (arg0, COMPLEX_TYPE)
9807 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9808 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9811 CASE_FLT_FN (BUILT_IN_CREAL):
9812 if (validate_arg (arg0, COMPLEX_TYPE)
9813 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9814 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9817 CASE_FLT_FN (BUILT_IN_CIMAG):
9818 if (validate_arg (arg0, COMPLEX_TYPE)
9819 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9820 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9823 CASE_FLT_FN (BUILT_IN_CCOS):
9824 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9826 CASE_FLT_FN (BUILT_IN_CCOSH):
9827 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9829 CASE_FLT_FN (BUILT_IN_CPROJ):
9830 return fold_builtin_cproj(loc, arg0, type);
9832 CASE_FLT_FN (BUILT_IN_CSIN):
9833 if (validate_arg (arg0, COMPLEX_TYPE)
9834 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9835 return do_mpc_arg1 (arg0, type, mpc_sin);
9838 CASE_FLT_FN (BUILT_IN_CSINH):
9839 if (validate_arg (arg0, COMPLEX_TYPE)
9840 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9841 return do_mpc_arg1 (arg0, type, mpc_sinh);
9844 CASE_FLT_FN (BUILT_IN_CTAN):
9845 if (validate_arg (arg0, COMPLEX_TYPE)
9846 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9847 return do_mpc_arg1 (arg0, type, mpc_tan);
9850 CASE_FLT_FN (BUILT_IN_CTANH):
9851 if (validate_arg (arg0, COMPLEX_TYPE)
9852 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9853 return do_mpc_arg1 (arg0, type, mpc_tanh);
9856 CASE_FLT_FN (BUILT_IN_CLOG):
9857 if (validate_arg (arg0, COMPLEX_TYPE)
9858 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9859 return do_mpc_arg1 (arg0, type, mpc_log);
9862 CASE_FLT_FN (BUILT_IN_CSQRT):
9863 if (validate_arg (arg0, COMPLEX_TYPE)
9864 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9865 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9868 CASE_FLT_FN (BUILT_IN_CASIN):
9869 if (validate_arg (arg0, COMPLEX_TYPE)
9870 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9871 return do_mpc_arg1 (arg0, type, mpc_asin);
9874 CASE_FLT_FN (BUILT_IN_CACOS):
9875 if (validate_arg (arg0, COMPLEX_TYPE)
9876 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9877 return do_mpc_arg1 (arg0, type, mpc_acos);
9880 CASE_FLT_FN (BUILT_IN_CATAN):
9881 if (validate_arg (arg0, COMPLEX_TYPE)
9882 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9883 return do_mpc_arg1 (arg0, type, mpc_atan);
9886 CASE_FLT_FN (BUILT_IN_CASINH):
9887 if (validate_arg (arg0, COMPLEX_TYPE)
9888 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9889 return do_mpc_arg1 (arg0, type, mpc_asinh);
9892 CASE_FLT_FN (BUILT_IN_CACOSH):
9893 if (validate_arg (arg0, COMPLEX_TYPE)
9894 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9895 return do_mpc_arg1 (arg0, type, mpc_acosh);
9898 CASE_FLT_FN (BUILT_IN_CATANH):
9899 if (validate_arg (arg0, COMPLEX_TYPE)
9900 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9901 return do_mpc_arg1 (arg0, type, mpc_atanh);
9904 CASE_FLT_FN (BUILT_IN_CABS):
9905 return fold_builtin_cabs (loc, arg0, type, fndecl);
9907 CASE_FLT_FN (BUILT_IN_CARG):
9908 return fold_builtin_carg (loc, arg0, type);
9910 CASE_FLT_FN (BUILT_IN_SQRT):
9911 return fold_builtin_sqrt (loc, arg0, type);
9913 CASE_FLT_FN (BUILT_IN_CBRT):
9914 return fold_builtin_cbrt (loc, arg0, type);
/* Real math builtins: constant folding via MPFR (do_mpfr_arg1); the
   trailing bounds arguments give the allowed input domain where the
   C function has one.  */
9916 CASE_FLT_FN (BUILT_IN_ASIN):
9917 if (validate_arg (arg0, REAL_TYPE))
9918 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9919 &dconstm1, &dconst1, true);
9922 CASE_FLT_FN (BUILT_IN_ACOS):
9923 if (validate_arg (arg0, REAL_TYPE))
9924 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9925 &dconstm1, &dconst1, true);
9928 CASE_FLT_FN (BUILT_IN_ATAN):
9929 if (validate_arg (arg0, REAL_TYPE))
9930 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9933 CASE_FLT_FN (BUILT_IN_ASINH):
9934 if (validate_arg (arg0, REAL_TYPE))
9935 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9938 CASE_FLT_FN (BUILT_IN_ACOSH):
9939 if (validate_arg (arg0, REAL_TYPE))
9940 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9941 &dconst1, NULL, true);
9944 CASE_FLT_FN (BUILT_IN_ATANH):
9945 if (validate_arg (arg0, REAL_TYPE))
9946 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9947 &dconstm1, &dconst1, false);
9950 CASE_FLT_FN (BUILT_IN_SIN):
9951 if (validate_arg (arg0, REAL_TYPE))
9952 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9955 CASE_FLT_FN (BUILT_IN_COS):
9956 return fold_builtin_cos (loc, arg0, type, fndecl);
9958 CASE_FLT_FN (BUILT_IN_TAN):
9959 return fold_builtin_tan (arg0, type);
9961 CASE_FLT_FN (BUILT_IN_CEXP):
9962 return fold_builtin_cexp (loc, arg0, type);
9964 CASE_FLT_FN (BUILT_IN_CEXPI):
9965 if (validate_arg (arg0, REAL_TYPE))
9966 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9969 CASE_FLT_FN (BUILT_IN_SINH):
9970 if (validate_arg (arg0, REAL_TYPE))
9971 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9974 CASE_FLT_FN (BUILT_IN_COSH):
9975 return fold_builtin_cosh (loc, arg0, type, fndecl);
9977 CASE_FLT_FN (BUILT_IN_TANH):
9978 if (validate_arg (arg0, REAL_TYPE))
9979 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9982 CASE_FLT_FN (BUILT_IN_ERF):
9983 if (validate_arg (arg0, REAL_TYPE))
9984 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9987 CASE_FLT_FN (BUILT_IN_ERFC):
9988 if (validate_arg (arg0, REAL_TYPE))
9989 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9992 CASE_FLT_FN (BUILT_IN_TGAMMA):
9993 if (validate_arg (arg0, REAL_TYPE))
9994 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9997 CASE_FLT_FN (BUILT_IN_EXP):
9998 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10000 CASE_FLT_FN (BUILT_IN_EXP2):
10001 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10003 CASE_FLT_FN (BUILT_IN_EXP10):
10004 CASE_FLT_FN (BUILT_IN_POW10):
10005 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10007 CASE_FLT_FN (BUILT_IN_EXPM1):
10008 if (validate_arg (arg0, REAL_TYPE))
10009 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10012 CASE_FLT_FN (BUILT_IN_LOG):
10013 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10015 CASE_FLT_FN (BUILT_IN_LOG2):
10016 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10018 CASE_FLT_FN (BUILT_IN_LOG10):
10019 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10021 CASE_FLT_FN (BUILT_IN_LOG1P):
10022 if (validate_arg (arg0, REAL_TYPE))
10023 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10024 &dconstm1, NULL, false);
10027 CASE_FLT_FN (BUILT_IN_J0):
10028 if (validate_arg (arg0, REAL_TYPE))
10029 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10033 CASE_FLT_FN (BUILT_IN_J1):
10034 if (validate_arg (arg0, REAL_TYPE))
10035 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10039 CASE_FLT_FN (BUILT_IN_Y0):
10040 if (validate_arg (arg0, REAL_TYPE))
10041 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10042 &dconst0, NULL, false);
10045 CASE_FLT_FN (BUILT_IN_Y1):
10046 if (validate_arg (arg0, REAL_TYPE))
10047 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10048 &dconst0, NULL, false);
10051 CASE_FLT_FN (BUILT_IN_NAN):
10052 case BUILT_IN_NAND32:
10053 case BUILT_IN_NAND64:
10054 case BUILT_IN_NAND128:
10055 return fold_builtin_nan (arg0, type, true);
10057 CASE_FLT_FN (BUILT_IN_NANS):
10058 return fold_builtin_nan (arg0, type, false);
10060 CASE_FLT_FN (BUILT_IN_FLOOR):
10061 return fold_builtin_floor (loc, fndecl, arg0);
10063 CASE_FLT_FN (BUILT_IN_CEIL):
10064 return fold_builtin_ceil (loc, fndecl, arg0);
10066 CASE_FLT_FN (BUILT_IN_TRUNC):
10067 return fold_builtin_trunc (loc, fndecl, arg0);
10069 CASE_FLT_FN (BUILT_IN_ROUND):
10070 return fold_builtin_round (loc, fndecl, arg0);
10072 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10073 CASE_FLT_FN (BUILT_IN_RINT):
10074 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10076 CASE_FLT_FN (BUILT_IN_LCEIL):
10077 CASE_FLT_FN (BUILT_IN_LLCEIL):
10078 CASE_FLT_FN (BUILT_IN_LFLOOR):
10079 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10080 CASE_FLT_FN (BUILT_IN_LROUND):
10081 CASE_FLT_FN (BUILT_IN_LLROUND):
10082 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10084 CASE_FLT_FN (BUILT_IN_LRINT):
10085 CASE_FLT_FN (BUILT_IN_LLRINT):
10086 return fold_fixed_mathfn (loc, fndecl, arg0);
10088 case BUILT_IN_BSWAP32:
10089 case BUILT_IN_BSWAP64:
10090 return fold_builtin_bswap (fndecl, arg0);
10092 CASE_INT_FN (BUILT_IN_FFS):
10093 CASE_INT_FN (BUILT_IN_CLZ):
10094 CASE_INT_FN (BUILT_IN_CTZ):
10095 CASE_INT_FN (BUILT_IN_POPCOUNT):
10096 CASE_INT_FN (BUILT_IN_PARITY):
10097 return fold_builtin_bitop (fndecl, arg0);
10099 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10100 return fold_builtin_signbit (loc, arg0, type);
10102 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10103 return fold_builtin_significand (loc, arg0, type);
10105 CASE_FLT_FN (BUILT_IN_ILOGB):
10106 CASE_FLT_FN (BUILT_IN_LOGB):
10107 return fold_builtin_logb (loc, arg0, type);
10109 case BUILT_IN_ISASCII:
10110 return fold_builtin_isascii (loc, arg0);
10112 case BUILT_IN_TOASCII:
10113 return fold_builtin_toascii (loc, arg0);
10115 case BUILT_IN_ISDIGIT:
10116 return fold_builtin_isdigit (loc, arg0);
/* Classification builtins: try direct integer folding first
   (fold_builtin_classify), then fall back to the interclass
   fabs-compare expansion.  */
10118 CASE_FLT_FN (BUILT_IN_FINITE):
10119 case BUILT_IN_FINITED32:
10120 case BUILT_IN_FINITED64:
10121 case BUILT_IN_FINITED128:
10122 case BUILT_IN_ISFINITE:
10124 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10127 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10130 CASE_FLT_FN (BUILT_IN_ISINF):
10131 case BUILT_IN_ISINFD32:
10132 case BUILT_IN_ISINFD64:
10133 case BUILT_IN_ISINFD128:
10135 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10138 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10141 case BUILT_IN_ISNORMAL:
10142 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10144 case BUILT_IN_ISINF_SIGN:
10145 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10147 CASE_FLT_FN (BUILT_IN_ISNAN):
10148 case BUILT_IN_ISNAND32:
10149 case BUILT_IN_ISNAND64:
10150 case BUILT_IN_ISNAND128:
10151 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10153 case BUILT_IN_PRINTF:
10154 case BUILT_IN_PRINTF_UNLOCKED:
10155 case BUILT_IN_VPRINTF:
10156 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10158 case BUILT_IN_FREE:
/* free(NULL) is a no-op, so the whole call can be dropped.  */
10159 if (integer_zerop (arg0))
10160 return build_empty_stmt (loc);
10171 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10172 IGNORE is true if the result of the function call is ignored. This
10173 function returns NULL_TREE if no simplification was possible. */
10176 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10178 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10179 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10183 CASE_FLT_FN (BUILT_IN_JN):
10184 if (validate_arg (arg0, INTEGER_TYPE)
10185 && validate_arg (arg1, REAL_TYPE))
10186 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10189 CASE_FLT_FN (BUILT_IN_YN):
10190 if (validate_arg (arg0, INTEGER_TYPE)
10191 && validate_arg (arg1, REAL_TYPE))
10192 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10196 CASE_FLT_FN (BUILT_IN_DREM):
10197 CASE_FLT_FN (BUILT_IN_REMAINDER):
10198 if (validate_arg (arg0, REAL_TYPE)
10199 && validate_arg(arg1, REAL_TYPE))
10200 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10203 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10204 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10205 if (validate_arg (arg0, REAL_TYPE)
10206 && validate_arg(arg1, POINTER_TYPE))
10207 return do_mpfr_lgamma_r (arg0, arg1, type);
10210 CASE_FLT_FN (BUILT_IN_ATAN2):
10211 if (validate_arg (arg0, REAL_TYPE)
10212 && validate_arg(arg1, REAL_TYPE))
10213 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10216 CASE_FLT_FN (BUILT_IN_FDIM):
10217 if (validate_arg (arg0, REAL_TYPE)
10218 && validate_arg(arg1, REAL_TYPE))
10219 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10222 CASE_FLT_FN (BUILT_IN_HYPOT):
10223 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10225 CASE_FLT_FN (BUILT_IN_CPOW):
10226 if (validate_arg (arg0, COMPLEX_TYPE)
10227 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10228 && validate_arg (arg1, COMPLEX_TYPE)
10229 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10230 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10233 CASE_FLT_FN (BUILT_IN_LDEXP):
10234 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10235 CASE_FLT_FN (BUILT_IN_SCALBN):
10236 CASE_FLT_FN (BUILT_IN_SCALBLN):
10237 return fold_builtin_load_exponent (loc, arg0, arg1,
10238 type, /*ldexp=*/false);
10240 CASE_FLT_FN (BUILT_IN_FREXP):
10241 return fold_builtin_frexp (loc, arg0, arg1, type);
10243 CASE_FLT_FN (BUILT_IN_MODF):
10244 return fold_builtin_modf (loc, arg0, arg1, type);
10246 case BUILT_IN_BZERO:
10247 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10249 case BUILT_IN_FPUTS:
10250 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10252 case BUILT_IN_FPUTS_UNLOCKED:
10253 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10255 case BUILT_IN_STRSTR:
10256 return fold_builtin_strstr (loc, arg0, arg1, type);
10258 case BUILT_IN_STRCAT:
10259 return fold_builtin_strcat (loc, arg0, arg1);
10261 case BUILT_IN_STRSPN:
10262 return fold_builtin_strspn (loc, arg0, arg1);
10264 case BUILT_IN_STRCSPN:
10265 return fold_builtin_strcspn (loc, arg0, arg1);
10267 case BUILT_IN_STRCHR:
10268 case BUILT_IN_INDEX:
10269 return fold_builtin_strchr (loc, arg0, arg1, type);
10271 case BUILT_IN_STRRCHR:
10272 case BUILT_IN_RINDEX:
10273 return fold_builtin_strrchr (loc, arg0, arg1, type);
10275 case BUILT_IN_STRCPY:
10276 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10278 case BUILT_IN_STPCPY:
10281 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10285 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10288 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10291 case BUILT_IN_STRCMP:
10292 return fold_builtin_strcmp (loc, arg0, arg1);
10294 case BUILT_IN_STRPBRK:
10295 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10297 case BUILT_IN_EXPECT:
10298 return fold_builtin_expect (loc, arg0, arg1);
10300 CASE_FLT_FN (BUILT_IN_POW):
10301 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10303 CASE_FLT_FN (BUILT_IN_POWI):
10304 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10306 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10307 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10309 CASE_FLT_FN (BUILT_IN_FMIN):
10310 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10312 CASE_FLT_FN (BUILT_IN_FMAX):
10313 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10315 case BUILT_IN_ISGREATER:
10316 return fold_builtin_unordered_cmp (loc, fndecl,
10317 arg0, arg1, UNLE_EXPR, LE_EXPR);
10318 case BUILT_IN_ISGREATEREQUAL:
10319 return fold_builtin_unordered_cmp (loc, fndecl,
10320 arg0, arg1, UNLT_EXPR, LT_EXPR);
10321 case BUILT_IN_ISLESS:
10322 return fold_builtin_unordered_cmp (loc, fndecl,
10323 arg0, arg1, UNGE_EXPR, GE_EXPR);
10324 case BUILT_IN_ISLESSEQUAL:
10325 return fold_builtin_unordered_cmp (loc, fndecl,
10326 arg0, arg1, UNGT_EXPR, GT_EXPR);
10327 case BUILT_IN_ISLESSGREATER:
10328 return fold_builtin_unordered_cmp (loc, fndecl,
10329 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10330 case BUILT_IN_ISUNORDERED:
10331 return fold_builtin_unordered_cmp (loc, fndecl,
10332 arg0, arg1, UNORDERED_EXPR,
10335 /* We do the folding for va_start in the expander. */
10336 case BUILT_IN_VA_START:
10339 case BUILT_IN_SPRINTF:
10340 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10342 case BUILT_IN_OBJECT_SIZE:
10343 return fold_builtin_object_size (arg0, arg1);
10345 case BUILT_IN_PRINTF:
10346 case BUILT_IN_PRINTF_UNLOCKED:
10347 case BUILT_IN_VPRINTF:
10348 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10350 case BUILT_IN_PRINTF_CHK:
10351 case BUILT_IN_VPRINTF_CHK:
10352 if (!validate_arg (arg0, INTEGER_TYPE)
10353 || TREE_SIDE_EFFECTS (arg0))
10356 return fold_builtin_printf (loc, fndecl,
10357 arg1, NULL_TREE, ignore, fcode);
10360 case BUILT_IN_FPRINTF:
10361 case BUILT_IN_FPRINTF_UNLOCKED:
10362 case BUILT_IN_VFPRINTF:
10363 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10372 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10373 and ARG2. IGNORE is true if the result of the function call is ignored.
10374 This function returns NULL_TREE if no simplification was possible. */
10377 fold_builtin_3 (location_t loc, tree fndecl,
10378 tree arg0, tree arg1, tree arg2, bool ignore)
10380 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10381 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* Dispatch on the builtin's function code; each case either delegates to
   a dedicated folder or returns NULL_TREE when no folding applies.  */
10385 CASE_FLT_FN (BUILT_IN_SINCOS):
10386 return fold_builtin_sincos (loc, arg0, arg1, arg2);
/* For the MPFR-backed math builtins, only fold when every operand has
   been validated as a real (resp. pointer) typed tree.  */
10388 CASE_FLT_FN (BUILT_IN_FMA):
10389 if (validate_arg (arg0, REAL_TYPE)
10390 && validate_arg(arg1, REAL_TYPE)
10391 && validate_arg(arg2, REAL_TYPE))
10392 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10395 CASE_FLT_FN (BUILT_IN_REMQUO):
10396 if (validate_arg (arg0, REAL_TYPE)
10397 && validate_arg(arg1, REAL_TYPE)
10398 && validate_arg(arg2, POINTER_TYPE))
10399 return do_mpfr_remquo (arg0, arg1, arg2);
10402 case BUILT_IN_MEMSET:
10403 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
/* bcopy (src, dst, len) is folded as memmove (dst, src, len): note the
   swapped first two operands, and that bcopy returns void.  */
10405 case BUILT_IN_BCOPY:
10406 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10407 void_type_node, true, /*endp=*/3);
/* The endp argument distinguishes the memcpy/mempcpy/memmove flavors;
   bcopy and memmove both use endp=3.  */
10409 case BUILT_IN_MEMCPY:
10410 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10411 type, ignore, /*endp=*/0);
10413 case BUILT_IN_MEMPCPY:
10414 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10415 type, ignore, /*endp=*/1);
10417 case BUILT_IN_MEMMOVE:
10418 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10419 type, ignore, /*endp=*/3);
10421 case BUILT_IN_STRNCAT:
10422 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10424 case BUILT_IN_STRNCPY:
10425 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10427 case BUILT_IN_STRNCMP:
10428 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10430 case BUILT_IN_MEMCHR:
10431 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10433 case BUILT_IN_BCMP:
10434 case BUILT_IN_MEMCMP:
10435 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10437 case BUILT_IN_SPRINTF:
10438 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10440 case BUILT_IN_STRCPY_CHK:
10441 case BUILT_IN_STPCPY_CHK:
10442 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10445 case BUILT_IN_STRCAT_CHK:
10446 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
/* For the checked printf variants the leading integer argument is only
   dropped from the folded call when it is a side-effect-free integer.  */
10448 case BUILT_IN_PRINTF_CHK:
10449 case BUILT_IN_VPRINTF_CHK:
10450 if (!validate_arg (arg0, INTEGER_TYPE)
10451 || TREE_SIDE_EFFECTS (arg0))
10454 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10457 case BUILT_IN_FPRINTF:
10458 case BUILT_IN_FPRINTF_UNLOCKED:
10459 case BUILT_IN_VFPRINTF:
10460 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10463 case BUILT_IN_FPRINTF_CHK:
10464 case BUILT_IN_VFPRINTF_CHK:
10465 if (!validate_arg (arg1, INTEGER_TYPE)
10466 || TREE_SIDE_EFFECTS (arg1))
10469 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10478 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10479 ARG2, and ARG3. IGNORE is true if the result of the function call is
10480 ignored. This function returns NULL_TREE if no simplification was
10484 fold_builtin_4 (location_t loc, tree fndecl,
10485 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10487 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* The object-size-checked memory builtins share one folder that is told
   which variant it is handling via the function code.  */
10491 case BUILT_IN_MEMCPY_CHK:
10492 case BUILT_IN_MEMPCPY_CHK:
10493 case BUILT_IN_MEMMOVE_CHK:
10494 case BUILT_IN_MEMSET_CHK:
10495 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10497 DECL_FUNCTION_CODE (fndecl))
10499 case BUILT_IN_STRNCPY_CHK:
10500 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10502 case BUILT_IN_STRNCAT_CHK:
10503 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
/* The flag argument (arg1) is only dropped from the folded fprintf call
   when it is a side-effect-free integer.  */
10505 case BUILT_IN_FPRINTF_CHK:
10506 case BUILT_IN_VFPRINTF_CHK:
10507 if (!validate_arg (arg1, INTEGER_TYPE)
10508 || TREE_SIDE_EFFECTS (arg1))
10511 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10521 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10522 arguments, where NARGS <= 4. IGNORE is true if the result of the
10523 function call is ignored. This function returns NULL_TREE if no
10524 simplification was possible. Note that this only folds builtins with
10525 fixed argument patterns. Foldings that do varargs-to-varargs
10526 transformations, or that match calls with more than 4 arguments,
10527 need to be handled with fold_builtin_varargs instead. */
10529 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10532 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10534 tree ret = NULL_TREE;
/* Dispatch to the fixed-arity folder matching NARGS (0 through 4).  */
10539 ret = fold_builtin_0 (loc, fndecl, ignore);
10542 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10545 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10548 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10551 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
/* A successful fold is wrapped in a NOP_EXPR marked TREE_NO_WARNING so
   that replacing the call does not trigger "statement without effect"
   style warnings (see the comment above fold_call_expr below).  */
10559 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10560 SET_EXPR_LOCATION (ret, loc);
10561 TREE_NO_WARNING (ret) = 1;
10567 /* Builtins with folding operations that operate on "..." arguments
10568 need special handling; we need to store the arguments in a convenient
10569 data structure before attempting any folding. Fortunately there are
10570 only a few builtins that fall into this category. FNDECL is the
10571 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10572 result of the function call is ignored. */
10575 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10576 bool ignore ATTRIBUTE_UNUSED)
10578 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10579 tree ret = NULL_TREE;
/* Only the handful of varargs builtins below get folded; the individual
   folders receive the whole CALL_EXPR rather than unpacked arguments.  */
10583 case BUILT_IN_SPRINTF_CHK:
10584 case BUILT_IN_VSPRINTF_CHK:
10585 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10588 case BUILT_IN_SNPRINTF_CHK:
10589 case BUILT_IN_VSNPRINTF_CHK:
10590 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10593 case BUILT_IN_FPCLASSIFY:
10594 ret = fold_builtin_fpclassify (loc, exp);
/* As in fold_builtin_n: wrap the folded result in a NOP_EXPR with
   TREE_NO_WARNING set so later warning passes stay quiet.  */
10602 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10603 SET_EXPR_LOCATION (ret, loc);
10604 TREE_NO_WARNING (ret) = 1;
10610 /* Return true if FNDECL shouldn't be folded right now.
10611 If a built-in function has an inline attribute always_inline
10612 wrapper, defer folding it after always_inline functions have
10613 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10614 might not be performed. */
10617 avoid_folding_inline_builtin (tree fndecl)
/* All four conditions must hold: declared inline, exempt from inline
   limits, always_inline attribute present, and the current function's
   always_inline callees not yet inlined.  */
10619 return (DECL_DECLARED_INLINE_P (fndecl)
10620 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10622 && !cfun->always_inline_functions_inlined
10623 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10626 /* A wrapper function for builtin folding that prevents warnings for
10627 "statement without effect" and the like, caused by removing the
10628 call node earlier than the warning is generated. */
10631 fold_call_expr (location_t loc, tree exp, bool ignore)
10633 tree ret = NULL_TREE;
10634 tree fndecl = get_callee_fndecl (exp);
10636 && TREE_CODE (fndecl) == FUNCTION_DECL
10637 && DECL_BUILT_IN (fndecl)
10638 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10639 yet. Defer folding until we see all the arguments
10640 (after inlining). */
10641 && !CALL_EXPR_VA_ARG_PACK (exp))
10643 int nargs = call_expr_nargs (exp);
10645 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10646 instead last argument is __builtin_va_arg_pack (). Defer folding
10647 even in that case, until arguments are finalized. */
10648 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10650 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10652 && TREE_CODE (fndecl2) == FUNCTION_DECL
10653 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10654 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
/* always_inline fortify wrappers must not be folded yet.  */
10658 if (avoid_folding_inline_builtin (fndecl))
/* Machine-specific builtins are folded by the target hook.  */
10661 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10662 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10663 CALL_EXPR_ARGP (exp), ignore);
/* Fixed-arity builtins go through fold_builtin_n; anything with more
   arguments falls back to the varargs folder.  */
10666 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10668 tree *args = CALL_EXPR_ARGP (exp);
10669 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10672 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10680 /* Conveniently construct a function call expression. FNDECL names the
10681 function to be called and ARGLIST is a TREE_LIST of arguments. */
10684 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10686 tree fntype = TREE_TYPE (fndecl);
10687 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10688 int n = list_length (arglist);
/* Flatten the TREE_LIST into a stack-allocated array, which is the
   form fold_builtin_call_array expects.  */
10689 tree *argarray = (tree *) alloca (n * sizeof (tree));
10692 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10693 argarray[i] = TREE_VALUE (arglist);
10694 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10697 /* Conveniently construct a function call expression. FNDECL names the
10698 function to be called, N is the number of arguments, and the "..."
10699 parameters are the argument expressions. */
10702 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10705 tree fntype = TREE_TYPE (fndecl);
10706 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
/* Collect the N variadic tree arguments into a stack array, then hand
   everything to fold_builtin_call_array to build (and maybe fold) the
   CALL_EXPR.  */
10707 tree *argarray = (tree *) alloca (n * sizeof (tree));
10711 for (i = 0; i < n; i++)
10712 argarray[i] = va_arg (ap, tree);
10714 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10717 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10718 varargs macros aren't supported by all bootstrap compilers. */
10721 build_call_expr (tree fndecl, int n, ...)
10724 tree fntype = TREE_TYPE (fndecl);
10725 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
/* Same argument-collection dance as build_call_expr_loc, but the
   resulting call carries no source location.  */
10726 tree *argarray = (tree *) alloca (n * sizeof (tree));
10730 for (i = 0; i < n; i++)
10731 argarray[i] = va_arg (ap, tree);
10733 return fold_builtin_call_array (UNKNOWN_LOCATION, TREE_TYPE (fntype),
10737 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10738 N arguments are passed in the array ARGARRAY. */
10741 fold_builtin_call_array (location_t loc, tree type,
10746 tree ret = NULL_TREE;
/* Folding is only attempted when FN is the address of a builtin
   FUNCTION_DECL; otherwise we just build a plain CALL_EXPR.  */
10749 if (TREE_CODE (fn) == ADDR_EXPR)
10751 tree fndecl = TREE_OPERAND (fn, 0);
10752 if (TREE_CODE (fndecl) == FUNCTION_DECL
10753 && DECL_BUILT_IN (fndecl))
10755 /* If last argument is __builtin_va_arg_pack (), arguments to this
10756 function are not finalized yet. Defer folding until they are. */
10757 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10759 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10761 && TREE_CODE (fndecl2) == FUNCTION_DECL
10762 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10763 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10764 return build_call_array_loc (loc, type, fn, n, argarray);
/* always_inline fortify wrappers are not folded yet either.  */
10766 if (avoid_folding_inline_builtin (fndecl))
10767 return build_call_array_loc (loc, type, fn, n, argarray);
10768 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10770 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10774 return build_call_array_loc (loc, type, fn, n, argarray);
10776 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10778 /* First try the transformations that don't require consing up
10780 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10785 /* If we got this far, we need to build an exp. */
10786 exp = build_call_array_loc (loc, type, fn, n, argarray);
10787 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10788 return ret ? ret : exp;
10792 return build_call_array_loc (loc, type, fn, n, argarray);
10795 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10796 along with N new arguments specified as the "..." parameters. SKIP
10797 is the number of arguments in EXP to be omitted. This function is used
10798 to do varargs-to-varargs transformations. */
10801 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10803 int oldnargs = call_expr_nargs (exp);
10804 int nargs = oldnargs - skip + n;
10805 tree fntype = TREE_TYPE (fndecl);
10806 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
/* Build the new argument vector: the N fresh "..." arguments first,
   then the tail of EXP's arguments starting after the SKIP'd ones.  */
10814 buffer = XALLOCAVEC (tree, nargs);
10816 for (i = 0; i < n; i++)
10817 buffer[i] = va_arg (ap, tree);
10819 for (j = skip; j < oldnargs; j++, i++)
10820 buffer[i] = CALL_EXPR_ARG (exp, j);
/* With no fresh arguments we can alias EXP's argument array directly
   instead of copying.  */
10823 buffer = CALL_EXPR_ARGP (exp) + skip;
10825 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
10828 /* Validate a single argument ARG against a tree code CODE representing
10832 validate_arg (const_tree arg, enum tree_code code)
/* POINTER_TYPE and INTEGER_TYPE are matched loosely: any pointer or any
   integral type (enum, bool, ...) is accepted, not just an exact
   tree-code match.  All other codes require an exact match.  */
10836 else if (code == POINTER_TYPE)
10837 return POINTER_TYPE_P (TREE_TYPE (arg));
10838 else if (code == INTEGER_TYPE)
10839 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10840 return code == TREE_CODE (TREE_TYPE (arg));
10843 /* This function validates the types of a function call argument list
10844 against a specified list of tree_codes. If the last specifier is a 0,
10845 that represents an ellipses, otherwise the last specifier must be a
10848 This is the GIMPLE version of validate_arglist. Eventually we want to
10849 completely convert builtins.c to work from GIMPLEs and the tree based
10850 validate_arglist will then be removed. */
10853 validate_gimple_arglist (const_gimple call, ...)
10855 enum tree_code code;
10861 va_start (ap, call)
/* Walk the variadic tree-code specifiers in parallel with the call's
   arguments.  Note va_arg reads an int because enums promote to int
   when passed through "...".  */
10866 code = (enum tree_code) va_arg (ap, int);
10870 /* This signifies an ellipses, any further arguments are all ok. */
10874 /* This signifies an endlink, if no arguments remain, return
10875 true, otherwise return false. */
10876 res = (i == gimple_call_num_args (call));
10879 /* If no parameters remain or the parameter's code does not
10880 match the specified code, return false. Otherwise continue
10881 checking any remaining arguments. */
10882 arg = gimple_call_arg (call, i++);
10883 if (!validate_arg (arg, code))
10890 /* We need gotos here since we can only have one VA_CLOSE in a
10898 /* This function validates the types of a function call argument list
10899 against a specified list of tree_codes. If the last specifier is a 0,
10900 that represents an ellipses, otherwise the last specifier must be a
10904 validate_arglist (const_tree callexpr, ...)
10906 enum tree_code code;
10909 const_call_expr_arg_iterator iter;
10912 va_start (ap, callexpr)
/* Tree-level twin of validate_gimple_arglist: iterate CALL_EXPR
   arguments with the const argument iterator while consuming the
   variadic tree-code specifiers.  */
10913 init_const_call_expr_arg_iterator (callexpr, &iter);
10917 code = (enum tree_code) va_arg (ap, int);
10921 /* This signifies an ellipses, any further arguments are all ok. */
10925 /* This signifies an endlink, if no arguments remain, return
10926 true, otherwise return false. */
10927 res = !more_const_call_expr_args_p (&iter);
10930 /* If no parameters remain or the parameter's code does not
10931 match the specified code, return false. Otherwise continue
10932 checking any remaining arguments. */
10933 arg = next_const_call_expr_arg (&iter);
10934 if (!validate_arg (arg, code))
10941 /* We need gotos here since we can only have one VA_CLOSE in a
10949 /* Default target-specific builtin expander that does nothing. */
/* Every parameter is intentionally unused; this is the fallback value
   for the targetm.expand_builtin hook.  */
10952 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10953 rtx target ATTRIBUTE_UNUSED,
10954 rtx subtarget ATTRIBUTE_UNUSED,
10955 enum machine_mode mode ATTRIBUTE_UNUSED,
10956 int ignore ATTRIBUTE_UNUSED)
10961 /* Returns true is EXP represents data that would potentially reside
10962 in a readonly section. */
10965 readonly_data_expr (tree exp)
/* Only address expressions can point at section-resident data.  */
10969 if (TREE_CODE (exp) != ADDR_EXPR)
10972 exp = get_base_address (TREE_OPERAND (exp, 0));
10976 /* Make sure we call decl_readonly_section only for trees it
10977 can handle (since it returns true for everything it doesn't
10979 if (TREE_CODE (exp) == STRING_CST
10980 || TREE_CODE (exp) == CONSTRUCTOR
10981 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10982 return decl_readonly_section (exp, 0);
10987 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10988 to the call, and TYPE is its return type.
10990 Return NULL_TREE if no simplification was possible, otherwise return the
10991 simplified form of the call as a tree.
10993 The simplified form may be a constant or other expression which
10994 computes the same value, but in a more efficient manner (including
10995 calls to other builtin functions).
10997 The call may contain arguments which need to be evaluated, but
10998 which are not useful to determine the result of the call. In
10999 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11000 COMPOUND_EXPR will be an argument which must be evaluated.
11001 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11002 COMPOUND_EXPR in the chain will contain the tree for the simplified
11003 form of the builtin function call. */
11006 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11008 if (!validate_arg (s1, POINTER_TYPE)
11009 || !validate_arg (s2, POINTER_TYPE))
11014 const char *p1, *p2;
11016 p2 = c_getstr (s2);
11020 p1 = c_getstr (s1);
/* Both strings are compile-time constants: evaluate with the host's
   strstr and fold to either a null pointer or s1 plus an offset.  */
11023 const char *r = strstr (p1, p2);
11027 return build_int_cst (TREE_TYPE (s1), 0);
11029 /* Return an offset into the constant string argument. */
11030 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11031 s1, size_int (r - p1));
11032 return fold_convert_loc (loc, type, tem);
11035 /* The argument is const char *, and the result is char *, so we need
11036 a type conversion here to avoid a warning. */
11038 return fold_convert_loc (loc, type, s1);
/* Single-character needle: rewrite via the cheaper strchr builtin if
   its implicit decl is available.  */
11043 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11047 /* New argument list transforming strstr(s1, s2) to
11048 strchr(s1, s2[0]). */
11049 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11053 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11054 the call, and TYPE is its return type.
11056 Return NULL_TREE if no simplification was possible, otherwise return the
11057 simplified form of the call as a tree.
11059 The simplified form may be a constant or other expression which
11060 computes the same value, but in a more efficient manner (including
11061 calls to other builtin functions).
11063 The call may contain arguments which need to be evaluated, but
11064 which are not useful to determine the result of the call. In
11065 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11066 COMPOUND_EXPR will be an argument which must be evaluated.
11067 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11068 COMPOUND_EXPR in the chain will contain the tree for the simplified
11069 form of the builtin function call. */
11072 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11074 if (!validate_arg (s1, POINTER_TYPE)
11075 || !validate_arg (s2, INTEGER_TYPE))
/* Folding needs a constant search character.  */
11081 if (TREE_CODE (s2) != INTEGER_CST)
11084 p1 = c_getstr (s1);
/* target_char_cast converts the target character constant to a host
   char; failure means we cannot fold.  */
11091 if (target_char_cast (s2, &c))
11094 r = strchr (p1, c);
11097 return build_int_cst (TREE_TYPE (s1), 0);
11099 /* Return an offset into the constant string argument. */
11100 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11101 s1, size_int (r - p1));
11102 return fold_convert_loc (loc, type, tem);
11108 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11109 the call, and TYPE is its return type.
11111 Return NULL_TREE if no simplification was possible, otherwise return the
11112 simplified form of the call as a tree.
11114 The simplified form may be a constant or other expression which
11115 computes the same value, but in a more efficient manner (including
11116 calls to other builtin functions).
11118 The call may contain arguments which need to be evaluated, but
11119 which are not useful to determine the result of the call. In
11120 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11121 COMPOUND_EXPR will be an argument which must be evaluated.
11122 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11123 COMPOUND_EXPR in the chain will contain the tree for the simplified
11124 form of the builtin function call. */
11127 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11129 if (!validate_arg (s1, POINTER_TYPE)
11130 || !validate_arg (s2, INTEGER_TYPE))
11137 if (TREE_CODE (s2) != INTEGER_CST)
11140 p1 = c_getstr (s1);
11147 if (target_char_cast (s2, &c))
/* Constant string: evaluate with the host's strrchr.  */
11150 r = strrchr (p1, c);
11153 return build_int_cst (TREE_TYPE (s1), 0);
11155 /* Return an offset into the constant string argument. */
11156 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11157 s1, size_int (r - p1));
11158 return fold_convert_loc (loc, type, tem);
/* Non-constant string: only the NUL search character can be folded,
   because strrchr (s, '\0') finds the same position as strchr.  */
11161 if (! integer_zerop (s2))
11164 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11168 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11169 return build_call_expr_loc (loc, fn, 2, s1, s2);
11173 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11174 to the call, and TYPE is its return type.
11176 Return NULL_TREE if no simplification was possible, otherwise return the
11177 simplified form of the call as a tree.
11179 The simplified form may be a constant or other expression which
11180 computes the same value, but in a more efficient manner (including
11181 calls to other builtin functions).
11183 The call may contain arguments which need to be evaluated, but
11184 which are not useful to determine the result of the call. In
11185 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11186 COMPOUND_EXPR will be an argument which must be evaluated.
11187 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11188 COMPOUND_EXPR in the chain will contain the tree for the simplified
11189 form of the builtin function call. */
11192 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11194 if (!validate_arg (s1, POINTER_TYPE)
11195 || !validate_arg (s2, POINTER_TYPE))
11200 const char *p1, *p2;
11202 p2 = c_getstr (s2);
11206 p1 = c_getstr (s1);
/* Both strings constant: evaluate with the host's strpbrk and fold to
   a null pointer or an offset into s1.  */
11209 const char *r = strpbrk (p1, p2);
11213 return build_int_cst (TREE_TYPE (s1), 0);
11215 /* Return an offset into the constant string argument. */
11216 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11217 s1, size_int (r - p1));
11218 return fold_convert_loc (loc, type, tem);
11222 /* strpbrk(x, "") == NULL.
11223 Evaluate and ignore s1 in case it had side-effects. */
11224 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11227 return NULL_TREE; /* Really call strpbrk. */
/* Single-character accept set: rewrite as strchr (s1, s2[0]).  */
11229 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11233 /* New argument list transforming strpbrk(s1, s2) to
11234 strchr(s1, s2[0]). */
11235 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11239 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11242 Return NULL_TREE if no simplification was possible, otherwise return the
11243 simplified form of the call as a tree.
11245 The simplified form may be a constant or other expression which
11246 computes the same value, but in a more efficient manner (including
11247 calls to other builtin functions).
11249 The call may contain arguments which need to be evaluated, but
11250 which are not useful to determine the result of the call. In
11251 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11252 COMPOUND_EXPR will be an argument which must be evaluated.
11253 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11254 COMPOUND_EXPR in the chain will contain the tree for the simplified
11255 form of the builtin function call. */
11258 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11260 if (!validate_arg (dst, POINTER_TYPE)
11261 || !validate_arg (src, POINTER_TYPE))
11265 const char *p = c_getstr (src);
11267 /* If the string length is zero, return the dst parameter. */
11268 if (p && *p == '\0')
11271 if (optimize_insn_for_speed_p ())
11273 /* See if we can store by pieces into (dst + strlen(dst)). */
11275 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11276 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
/* Both replacement decls must exist before we rewrite the call.  */
11278 if (!strlen_fn || !strcpy_fn)
11281 /* If we don't have a movstr we don't want to emit an strcpy
11282 call. We have to do that if the length of the source string
11283 isn't computable (in that case we can use memcpy probably
11284 later expanding to a sequence of mov instructions). If we
11285 have movstr instructions we can emit strcpy calls. */
11288 tree len = c_strlen (src, 1);
11289 if (! len || TREE_SIDE_EFFECTS (len))
11293 /* Stabilize the argument list. */
11294 dst = builtin_save_expr (dst);
11296 /* Create strlen (dst). */
11297 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11298 /* Create (dst p+ strlen (dst)). */
11300 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11301 TREE_TYPE (dst), dst, newdst);
11302 newdst = builtin_save_expr (newdst);
/* The rewritten form copies SRC to dst + strlen (dst), then yields the
   original DST as the value of the whole expression.  */
11304 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11305 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11311 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11312 arguments to the call.
11314 Return NULL_TREE if no simplification was possible, otherwise return the
11315 simplified form of the call as a tree.
11317 The simplified form may be a constant or other expression which
11318 computes the same value, but in a more efficient manner (including
11319 calls to other builtin functions).
11321 The call may contain arguments which need to be evaluated, but
11322 which are not useful to determine the result of the call. In
11323 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11324 COMPOUND_EXPR will be an argument which must be evaluated.
11325 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11326 COMPOUND_EXPR in the chain will contain the tree for the simplified
11327 form of the builtin function call. */
11330 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11332 if (!validate_arg (dst, POINTER_TYPE)
11333 || !validate_arg (src, POINTER_TYPE)
11334 || !validate_arg (len, INTEGER_TYPE))
11338 const char *p = c_getstr (src);
11340 /* If the requested length is zero, or the src parameter string
11341 length is zero, return the dst parameter. */
11342 if (integer_zerop (len) || (p && *p == '\0'))
11343 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11345 /* If the requested len is greater than or equal to the string
11346 length, call strcat. */
11347 if (TREE_CODE (len) == INTEGER_CST && p
11348 && compare_tree_int (len, strlen (p)) >= 0)
11350 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11352 /* If the replacement _DECL isn't initialized, don't do the
/* LEN >= strlen (SRC) means the bound never truncates, so the call is
   equivalent to plain strcat.  */
11357 return build_call_expr_loc (loc, fn, 2, dst, src);
11363 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11366 Return NULL_TREE if no simplification was possible, otherwise return the
11367 simplified form of the call as a tree.
11369 The simplified form may be a constant or other expression which
11370 computes the same value, but in a more efficient manner (including
11371 calls to other builtin functions).
11373 The call may contain arguments which need to be evaluated, but
11374 which are not useful to determine the result of the call. In
11375 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11376 COMPOUND_EXPR will be an argument which must be evaluated.
11377 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11378 COMPOUND_EXPR in the chain will contain the tree for the simplified
11379 form of the builtin function call. */
11382 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11384 if (!validate_arg (s1, POINTER_TYPE)
11385 || !validate_arg (s2, POINTER_TYPE))
11389 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11391 /* If both arguments are constants, evaluate at compile-time. */
11394 const size_t r = strspn (p1, p2);
11395 return size_int (r);
11398 /* If either argument is "", return NULL_TREE. */
/* (The fold here is actually to size 0, keeping both operands alive.)  */
11399 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11400 /* Evaluate and ignore both arguments in case either one has
11402 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11408 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11411 Return NULL_TREE if no simplification was possible, otherwise return the
11412 simplified form of the call as a tree.
11414 The simplified form may be a constant or other expression which
11415 computes the same value, but in a more efficient manner (including
11416 calls to other builtin functions).
11418 The call may contain arguments which need to be evaluated, but
11419 which are not useful to determine the result of the call. In
11420 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11421 COMPOUND_EXPR will be an argument which must be evaluated.
11422 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11423 COMPOUND_EXPR in the chain will contain the tree for the simplified
11424 form of the builtin function call. */
11427 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11429 if (!validate_arg (s1, POINTER_TYPE)
11430 || !validate_arg (s2, POINTER_TYPE))
11434 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11436 /* If both arguments are constants, evaluate at compile-time. */
11439 const size_t r = strcspn (p1, p2);
11440 return size_int (r);
11443 /* If the first argument is "", return NULL_TREE. */
/* (The fold here is actually to size 0, keeping s2 alive.)  */
11444 if (p1 && *p1 == '\0')
11446 /* Evaluate and ignore argument s2 in case it has
11448 return omit_one_operand_loc (loc, size_type_node,
11449 size_zero_node, s2);
11452 /* If the second argument is "", return __builtin_strlen(s1). */
11453 if (p2 && *p2 == '\0')
11455 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11457 /* If the replacement _DECL isn't initialized, don't do the
11462 return build_call_expr_loc (loc, fn, 1, s1);
11468 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11469 to the call. IGNORE is true if the value returned
11470 by the builtin will be ignored. UNLOCKED is true if this is
11471 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11472 the known length of the string. Return NULL_TREE if no simplification
/* Fold a call to fputs/fputs_unlocked with arguments ARG0 (string) and
   ARG1 (stream).  Based on the compile-time known length of ARG0:
   length 0 deletes the call (evaluating ARG1 for side effects),
   length 1 lowers to fputc, and longer strings lower to
   fwrite (string, 1, len, stream) — unless optimizing for size, in
   which case fputs is kept.  Nothing is done when the return value of
   the call is used (IGNORE false).
   NOTE(review): interior lines are elided in this extraction.  */
11476 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11477 bool ignore, bool unlocked, tree len)
11479 /* If we're using an unlocked function, assume the other unlocked
11480 functions exist explicitly. */
11481 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11482 : implicit_built_in_decls[BUILT_IN_FPUTC];
11483 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11484 : implicit_built_in_decls[BUILT_IN_FWRITE];
11486 /* If the return value is used, don't do the transformation. */
11490 /* Verify the arguments in the original call. */
11491 if (!validate_arg (arg0, POINTER_TYPE)
11492 || !validate_arg (arg1, POINTER_TYPE))
11496 len = c_strlen (arg0, 0);
11498 /* Get the length of the string passed to fputs. If the length
11499 can't be determined, punt. */
11501 || TREE_CODE (len) != INTEGER_CST)
11504 switch (compare_tree_int (len, 1))
11506 case -1: /* length is 0, delete the call entirely . */
11507 return omit_one_operand_loc (loc, integer_type_node,
11508 integer_zero_node, arg1);;
11510 case 0: /* length is 1, call fputc. */
11512 const char *p = c_getstr (arg0);
11517 return build_call_expr_loc (loc, fn_fputc, 2,
11518 build_int_cst (NULL_TREE, p[0]), arg1);
11524 case 1: /* length is greater than 1, call fwrite. */
11526 /* If optimizing for size keep fputs. */
11527 if (optimize_function_for_size_p (cfun))
11529 /* New argument list transforming fputs(string, stream) to
11530 fwrite(string, 1, len, stream). */
11532 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11533 size_one_node, len, arg1);
11538 gcc_unreachable ();
11543 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11544 produced. False otherwise. This is done so that we don't output the error
11545 or warning twice or three times. */
/* Validate and fold a __builtin_next_arg or va_start call EXP.
   Returns true if an error was produced, false otherwise (done this
   way so the error/warning is not emitted multiple times).  Checks the
   argument count, diagnoses va_start in a fixed-argument function, and
   warns when the second va_start parameter is not the last named
   argument or is declared with register storage (undefined per C99
   7.15.1.4p4).  The call is then destructively rewritten so its
   checked argument becomes integer_zero_node, preventing repeated
   warnings on later passes.
   NOTE(review): interior lines are elided in this extraction.  */
11548 fold_builtin_next_arg (tree exp, bool va_start_p)
11550 tree fntype = TREE_TYPE (current_function_decl);
11551 int nargs = call_expr_nargs (exp);
11554 if (TYPE_ARG_TYPES (fntype) == 0
11555 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11556 == void_type_node))
11558 error ("%<va_start%> used in function with fixed args");
11564 if (va_start_p && (nargs != 2))
11566 error ("wrong number of arguments to function %<va_start%>");
11569 arg = CALL_EXPR_ARG (exp, 1);
11571 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11572 when we checked the arguments and if needed issued a warning. */
11577 /* Evidently an out of date version of <stdarg.h>; can't validate
11578 va_start's second argument, but can still work as intended. */
11579 warning (0, "%<__builtin_next_arg%> called without an argument");
11582 else if (nargs > 1)
11584 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11587 arg = CALL_EXPR_ARG (exp, 0);
11590 if (TREE_CODE (arg) == SSA_NAME)
11591 arg = SSA_NAME_VAR (arg);
11593 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11594 or __builtin_next_arg (0) the first time we see it, after checking
11595 the arguments and if needed issuing a warning. */
11596 if (!integer_zerop (arg))
11598 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11600 /* Strip off all nops for the sake of the comparison. This
11601 is not quite the same as STRIP_NOPS. It does more.
11602 We must also strip off INDIRECT_EXPR for C++ reference
11604 while (CONVERT_EXPR_P (arg)
11605 || TREE_CODE (arg) == INDIRECT_REF)
11606 arg = TREE_OPERAND (arg, 0);
11607 if (arg != last_parm)
11609 /* FIXME: Sometimes with the tree optimizers we can get the
11610 not the last argument even though the user used the last
11611 argument. We just warn and set the arg to be the last
11612 argument so that we will get wrong-code because of
11614 warning (0, "second parameter of %<va_start%> not last named argument");
11617 /* Undefined by C99 7.15.1.4p4 (va_start):
11618 "If the parameter parmN is declared with the register storage
11619 class, with a function or array type, or with a type that is
11620 not compatible with the type that results after application of
11621 the default argument promotions, the behavior is undefined."
11623 else if (DECL_REGISTER (arg))
11624 warning (0, "undefined behaviour when second parameter of "
11625 "%<va_start%> is declared with %<register%> storage");
11627 /* We want to verify the second parameter just once before the tree
11628 optimizers are run and then avoid keeping it in the tree,
11629 as otherwise we could warn even for correct code like:
11630 void foo (int i, ...)
11631 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11633 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11635 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11641 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11642 ORIG may be null if this is a 2-argument call. We don't attempt to
11643 simplify calls with more than 3 arguments.
11645 Return NULL_TREE if no simplification was possible, otherwise return the
11646 simplified form of the call as a tree. If IGNORED is true, it means that
11647 the caller does not use the returned value of the function. */
/* Simplify sprintf (DEST, FMT[, ORIG]).  A format with no '%' at all
   becomes strcpy (dest, fmt) with the known literal length as the
   return value; a "%s" format becomes strcpy (dest, orig), with the
   return value taken from c_strlen (orig) when constant.  When both a
   replacement call and a return value are available they are combined
   into a COMPOUND_EXPR whose value is converted to sprintf's return
   type.
   NOTE(review): interior lines are elided in this extraction.  */
11650 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11651 tree orig, int ignored)
11654 const char *fmt_str = NULL;
11656 /* Verify the required arguments in the original call. We deal with two
11657 types of sprintf() calls: 'sprintf (str, fmt)' and
11658 'sprintf (dest, "%s", orig)'. */
11659 if (!validate_arg (dest, POINTER_TYPE)
11660 || !validate_arg (fmt, POINTER_TYPE))
11662 if (orig && !validate_arg (orig, POINTER_TYPE))
11665 /* Check whether the format is a literal string constant. */
11666 fmt_str = c_getstr (fmt);
11667 if (fmt_str == NULL)
11671 retval = NULL_TREE;
11673 if (!init_target_chars ())
11676 /* If the format doesn't contain % args or %%, use strcpy. */
11677 if (strchr (fmt_str, target_percent) == NULL)
11679 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11684 /* Don't optimize sprintf (buf, "abc", ptr++). */
11688 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11689 'format' is known to contain no % formats. */
11690 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11692 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11695 /* If the format is "%s", use strcpy if the result isn't used. */
11696 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11699 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11704 /* Don't crash on sprintf (str1, "%s"). */
11708 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11711 retval = c_strlen (orig, 1);
11712 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11715 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11718 if (call && retval)
11720 retval = fold_convert_loc
11721 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11723 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11729 /* Expand a call EXP to __builtin_object_size. */
/* Expand a call EXP to __builtin_object_size at RTL-generation time.
   Diagnoses a non-pointer first argument or a second argument that is
   not an integer constant in [0, 3], expanding a trap in those cases.
   Otherwise returns (size_t)-1 for types 0/1 and (size_t)0 for types
   2/3 (the "unknown size" defaults).
   NOTE(review): interior lines are elided in this extraction.  */
11732 expand_builtin_object_size (tree exp)
11735 int object_size_type;
11736 tree fndecl = get_callee_fndecl (exp);
11738 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11740 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11742 expand_builtin_trap ();
11746 ost = CALL_EXPR_ARG (exp, 1);
11749 if (TREE_CODE (ost) != INTEGER_CST
11750 || tree_int_cst_sgn (ost) < 0
11751 || compare_tree_int (ost, 3) > 0)
11753 error ("%Klast argument of %D is not integer constant between 0 and 3",
11755 expand_builtin_trap ();
11759 object_size_type = tree_low_cst (ost, 0);
11761 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11764 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11765 FCODE is the BUILT_IN_* to use.
11766 Return NULL_RTX if we failed; the caller should emit a normal call,
11767 otherwise try to get the result in TARGET, if convenient (and in
11768 mode MODE if that's convenient). */
/* Expand EXP, a call to one of the __mem{cpy,pcpy,move,set}_chk
   builtins, into RTL.  When the length is a known constant (or the
   object size is -1, i.e. unchecked), the call is lowered to the plain
   mem* function; a compile-time certain overflow (SIZE < LEN) emits a
   warning instead.  Special cases: src == dest is folded away for the
   non-pcpy variants, and __memmove_chk of read-only source data is
   retried as __memcpy_chk.  Returns NULL_RTX when the caller should
   emit a normal library call.
   NOTE(review): interior lines are elided in this extraction.  */
11771 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11772 enum built_in_function fcode)
11774 tree dest, src, len, size;
11776 if (!validate_arglist (exp,
11778 fcode == BUILT_IN_MEMSET_CHK
11779 ? INTEGER_TYPE : POINTER_TYPE,
11780 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11783 dest = CALL_EXPR_ARG (exp, 0);
11784 src = CALL_EXPR_ARG (exp, 1);
11785 len = CALL_EXPR_ARG (exp, 2);
11786 size = CALL_EXPR_ARG (exp, 3);
11788 if (! host_integerp (size, 1))
11791 if (host_integerp (len, 1) || integer_all_onesp (size))
11795 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11797 warning_at (tree_nonartificial_location (exp),
11798 0, "%Kcall to %D will always overflow destination buffer",
11799 exp, get_callee_fndecl (exp));
11804 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11805 mem{cpy,pcpy,move,set} is available. */
11808 case BUILT_IN_MEMCPY_CHK:
11809 fn = built_in_decls[BUILT_IN_MEMCPY];
11811 case BUILT_IN_MEMPCPY_CHK:
11812 fn = built_in_decls[BUILT_IN_MEMPCPY];
11814 case BUILT_IN_MEMMOVE_CHK:
11815 fn = built_in_decls[BUILT_IN_MEMMOVE];
11817 case BUILT_IN_MEMSET_CHK:
11818 fn = built_in_decls[BUILT_IN_MEMSET];
11827 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11828 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11829 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11830 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11832 else if (fcode == BUILT_IN_MEMSET_CHK)
11836 unsigned int dest_align
11837 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11839 /* If DEST is not a pointer type, call the normal function. */
11840 if (dest_align == 0)
11843 /* If SRC and DEST are the same (and not volatile), do nothing. */
11844 if (operand_equal_p (src, dest, 0))
11848 if (fcode != BUILT_IN_MEMPCPY_CHK)
11850 /* Evaluate and ignore LEN in case it has side-effects. */
11851 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11852 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11855 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11856 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11859 /* __memmove_chk special case. */
11860 if (fcode == BUILT_IN_MEMMOVE_CHK)
11862 unsigned int src_align
11863 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11865 if (src_align == 0)
11868 /* If src is categorized for a readonly section we can use
11869 normal __memcpy_chk. */
11870 if (readonly_data_expr (src))
11872 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11875 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11876 dest, src, len, size);
11877 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11878 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11879 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11886 /* Emit warning if a buffer overflow is detected at compile time. */
/* Emit a warning for the checked string builtins (str[n]cpy_chk,
   st[rp]cpy_chk, str[n]cat_chk, v?snprintf_chk) when a buffer overflow
   can be proven at compile time.  The length/size argument positions
   are selected per FCODE; no warning is emitted when the object size
   is unknown (all-ones).  __strncat_chk gets the weaker "might
   overflow" wording since the copied amount depends on strlen (SRC).
   NOTE(review): interior lines are elided in this extraction.  */
11889 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11893 location_t loc = tree_nonartificial_location (exp);
11897 case BUILT_IN_STRCPY_CHK:
11898 case BUILT_IN_STPCPY_CHK:
11899 /* For __strcat_chk the warning will be emitted only if overflowing
11900 by at least strlen (dest) + 1 bytes. */
11901 case BUILT_IN_STRCAT_CHK:
11902 len = CALL_EXPR_ARG (exp, 1);
11903 size = CALL_EXPR_ARG (exp, 2);
11906 case BUILT_IN_STRNCAT_CHK:
11907 case BUILT_IN_STRNCPY_CHK:
11908 len = CALL_EXPR_ARG (exp, 2);
11909 size = CALL_EXPR_ARG (exp, 3);
11911 case BUILT_IN_SNPRINTF_CHK:
11912 case BUILT_IN_VSNPRINTF_CHK:
11913 len = CALL_EXPR_ARG (exp, 1);
11914 size = CALL_EXPR_ARG (exp, 3);
11917 gcc_unreachable ();
11923 if (! host_integerp (size, 1) || integer_all_onesp (size))
11928 len = c_strlen (len, 1);
11929 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11932 else if (fcode == BUILT_IN_STRNCAT_CHK)
11934 tree src = CALL_EXPR_ARG (exp, 1);
11935 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11937 src = c_strlen (src, 1);
11938 if (! src || ! host_integerp (src, 1))
11940 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11941 exp, get_callee_fndecl (exp));
11944 else if (tree_int_cst_lt (src, size))
11947 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11950 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11951 exp, get_callee_fndecl (exp));
11954 /* Emit warning if a buffer overflow is detected at compile time
11955 in __sprintf_chk/__vsprintf_chk calls. */
/* Emit a compile-time overflow warning for __sprintf_chk and
   __vsprintf_chk calls.  The output length is computable when the
   format contains no '%' (literal length) or is exactly "%s" with a
   literal string argument; if that length is >= the known object size,
   warn that the call will always overflow.
   NOTE(review): interior lines are elided in this extraction.  */
11958 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11960 tree size, len, fmt;
11961 const char *fmt_str;
11962 int nargs = call_expr_nargs (exp);
11964 /* Verify the required arguments in the original call. */
11968 size = CALL_EXPR_ARG (exp, 2);
11969 fmt = CALL_EXPR_ARG (exp, 3);
11971 if (! host_integerp (size, 1) || integer_all_onesp (size))
11974 /* Check whether the format is a literal string constant. */
11975 fmt_str = c_getstr (fmt);
11976 if (fmt_str == NULL)
11979 if (!init_target_chars ())
11982 /* If the format doesn't contain % args or %%, we know its size. */
11983 if (strchr (fmt_str, target_percent) == 0)
11984 len = build_int_cstu (size_type_node, strlen (fmt_str));
11985 /* If the format is "%s" and first ... argument is a string literal,
11987 else if (fcode == BUILT_IN_SPRINTF_CHK
11988 && strcmp (fmt_str, target_percent_s) == 0)
11994 arg = CALL_EXPR_ARG (exp, 4);
11995 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11998 len = c_strlen (arg, 1);
11999 if (!len || ! host_integerp (len, 1))
12005 if (! tree_int_cst_lt (len, size))
12006 warning_at (tree_nonartificial_location (exp),
12007 0, "%Kcall to %D will always overflow destination buffer",
12008 exp, get_callee_fndecl (exp));
12011 /* Emit warning if a free is called with address of a variable. */
/* Warn when a call EXP to free is given the address of a non-heap
   object (e.g. a local or global variable).  Only ADDR_EXPR arguments
   whose base is a declared variable trigger the warning; bases that
   are (indirect) memory references are skipped since they may point
   into the heap.  */
12014 maybe_emit_free_warning (tree exp)
12016 tree arg = CALL_EXPR_ARG (exp, 0);
12019 if (TREE_CODE (arg) != ADDR_EXPR)
12022 arg = get_base_address (TREE_OPERAND (arg, 0));
12023 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12026 if (SSA_VAR_P (arg))
12027 warning_at (tree_nonartificial_location (exp),
12028 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12030 warning_at (tree_nonartificial_location (exp),
12031 0, "%Kattempt to free a non-heap object", exp);
12034 /* Fold a call to __builtin_object_size with arguments PTR and OST,
/* Fold __builtin_object_size (PTR, OST) to a constant where possible.
   OST must be an integer constant 0-3.  A PTR with side effects folds
   straight to the default ((size_t)-1 for types 0/1, 0 for 2/3) since
   the builtin does not evaluate its argument.  ADDR_EXPRs fold via
   compute_builtin_object_size; SSA names fold only once the size is
   actually known, so later passes can still improve the answer.
   NOTE(review): interior lines are elided in this extraction.  */
12038 fold_builtin_object_size (tree ptr, tree ost)
12040 tree ret = NULL_TREE;
12041 int object_size_type;
12043 if (!validate_arg (ptr, POINTER_TYPE)
12044 || !validate_arg (ost, INTEGER_TYPE))
12049 if (TREE_CODE (ost) != INTEGER_CST
12050 || tree_int_cst_sgn (ost) < 0
12051 || compare_tree_int (ost, 3) > 0)
12054 object_size_type = tree_low_cst (ost, 0);
12056 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12057 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12058 and (size_t) 0 for types 2 and 3. */
12059 if (TREE_SIDE_EFFECTS (ptr))
12060 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12062 if (TREE_CODE (ptr) == ADDR_EXPR)
12063 ret = build_int_cstu (size_type_node,
12064 compute_builtin_object_size (ptr, object_size_type));
12066 else if (TREE_CODE (ptr) == SSA_NAME)
12068 unsigned HOST_WIDE_INT bytes;
12070 /* If object size is not known yet, delay folding until
12071 later. Maybe subsequent passes will help determining
12073 bytes = compute_builtin_object_size (ptr, object_size_type);
12074 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12076 ret = build_int_cstu (size_type_node, bytes);
12081 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12082 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12083 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12090 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12091 DEST, SRC, LEN, and SIZE are the arguments to the call.
12092 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12093 code of the builtin. If MAXLEN is not NULL, it is maximum length
12094 passed as third argument. */
/* Fold a __mem{cpy,pcpy,move,set}_chk call at tree level.  DEST, SRC,
   LEN and SIZE are the call arguments; MAXLEN, if non-NULL, is an
   upper bound on LEN usable when LEN itself is not constant.  Folds
   src == dest to DEST (or DEST+LEN for __mempcpy_chk), lowers
   (void) __mempcpy_chk to (void) __memcpy_chk, and replaces the _chk
   call with the plain mem* function once SIZE is proven >= the copied
   length (never converting to the __*_chk_fail path here).
   NOTE(review): interior lines are elided in this extraction.  */
12097 fold_builtin_memory_chk (location_t loc, tree fndecl,
12098 tree dest, tree src, tree len, tree size,
12099 tree maxlen, bool ignore,
12100 enum built_in_function fcode)
12104 if (!validate_arg (dest, POINTER_TYPE)
12105 || !validate_arg (src,
12106 (fcode == BUILT_IN_MEMSET_CHK
12107 ? INTEGER_TYPE : POINTER_TYPE))
12108 || !validate_arg (len, INTEGER_TYPE)
12109 || !validate_arg (size, INTEGER_TYPE))
12112 /* If SRC and DEST are the same (and not volatile), return DEST
12113 (resp. DEST+LEN for __mempcpy_chk). */
12114 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12116 if (fcode != BUILT_IN_MEMPCPY_CHK)
12117 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12121 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12123 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12127 if (! host_integerp (size, 1))
12130 if (! integer_all_onesp (size))
12132 if (! host_integerp (len, 1))
12134 /* If LEN is not constant, try MAXLEN too.
12135 For MAXLEN only allow optimizing into non-_ocs function
12136 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12137 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12139 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12141 /* (void) __mempcpy_chk () can be optimized into
12142 (void) __memcpy_chk (). */
12143 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12147 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12155 if (tree_int_cst_lt (size, maxlen))
12160 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12161 mem{cpy,pcpy,move,set} is available. */
12164 case BUILT_IN_MEMCPY_CHK:
12165 fn = built_in_decls[BUILT_IN_MEMCPY];
12167 case BUILT_IN_MEMPCPY_CHK:
12168 fn = built_in_decls[BUILT_IN_MEMPCPY];
12170 case BUILT_IN_MEMMOVE_CHK:
12171 fn = built_in_decls[BUILT_IN_MEMMOVE];
12173 case BUILT_IN_MEMSET_CHK:
12174 fn = built_in_decls[BUILT_IN_MEMSET];
12183 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12186 /* Fold a call to the __st[rp]cpy_chk builtin.
12187 DEST, SRC, and SIZE are the arguments to the call.
12188 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12189 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12190 strings passed as second argument. */
/* Fold a __st[rp]cpy_chk call.  DEST, SRC and SIZE are the arguments;
   MAXLEN, if non-NULL, bounds the source string length.  src == dest
   folds to DEST for __strcpy_chk.  With a constant source length
   proven < SIZE, the call lowers to plain st[rp]cpy; a non-constant
   c_strlen result turns __strcpy_chk into __memcpy_chk of len + 1
   bytes; an ignored __stpcpy_chk result is retried as __strcpy_chk.
   NOTE(review): interior lines are elided in this extraction.  */
12193 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12194 tree src, tree size,
12195 tree maxlen, bool ignore,
12196 enum built_in_function fcode)
12200 if (!validate_arg (dest, POINTER_TYPE)
12201 || !validate_arg (src, POINTER_TYPE)
12202 || !validate_arg (size, INTEGER_TYPE))
12205 /* If SRC and DEST are the same (and not volatile), return DEST. */
12206 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12207 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12209 if (! host_integerp (size, 1))
12212 if (! integer_all_onesp (size))
12214 len = c_strlen (src, 1);
12215 if (! len || ! host_integerp (len, 1))
12217 /* If LEN is not constant, try MAXLEN too.
12218 For MAXLEN only allow optimizing into non-_ocs function
12219 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12220 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12222 if (fcode == BUILT_IN_STPCPY_CHK)
12227 /* If return value of __stpcpy_chk is ignored,
12228 optimize into __strcpy_chk. */
12229 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12233 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12236 if (! len || TREE_SIDE_EFFECTS (len))
12239 /* If c_strlen returned something, but not a constant,
12240 transform __strcpy_chk into __memcpy_chk. */
12241 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12245 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12246 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12247 build_call_expr_loc (loc, fn, 4,
12248 dest, src, len, size));
12254 if (! tree_int_cst_lt (maxlen, size))
12258 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12259 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12260 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12264 return build_call_expr_loc (loc, fn, 2, dest, src);
12267 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12268 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12269 length passed as third argument. */
/* Fold a __strncpy_chk call with arguments DEST, SRC, LEN and SIZE.
   MAXLEN, if non-NULL, is an upper bound on LEN used when LEN is not
   constant.  Once SIZE is proven >= the copied length, the call is
   lowered to plain strncpy (never to the __*_chk_fail path here).
   NOTE(review): interior lines are elided in this extraction.  */
12272 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12273 tree len, tree size, tree maxlen)
12277 if (!validate_arg (dest, POINTER_TYPE)
12278 || !validate_arg (src, POINTER_TYPE)
12279 || !validate_arg (len, INTEGER_TYPE)
12280 || !validate_arg (size, INTEGER_TYPE))
12283 if (! host_integerp (size, 1))
12286 if (! integer_all_onesp (size))
12288 if (! host_integerp (len, 1))
12290 /* If LEN is not constant, try MAXLEN too.
12291 For MAXLEN only allow optimizing into non-_ocs function
12292 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12293 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12299 if (tree_int_cst_lt (size, maxlen))
12303 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12304 fn = built_in_decls[BUILT_IN_STRNCPY];
12308 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12311 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12312 are the arguments to the call. */
/* Fold a __strcat_chk call with arguments DEST, SRC and SIZE.  SRC ==
   "" folds to DEST (evaluating SRC for side effects); when SIZE is the
   all-ones "unknown" object size, the call lowers to plain strcat.
   NOTE(review): interior lines are elided in this extraction.  */
12315 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12316 tree src, tree size)
12321 if (!validate_arg (dest, POINTER_TYPE)
12322 || !validate_arg (src, POINTER_TYPE)
12323 || !validate_arg (size, INTEGER_TYPE))
12326 p = c_getstr (src);
12327 /* If the SRC parameter is "", return DEST. */
12328 if (p && *p == '\0')
12329 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12331 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12334 /* If __builtin_strcat_chk is used, assume strcat is available. */
12335 fn = built_in_decls[BUILT_IN_STRCAT];
12339 return build_call_expr_loc (loc, fn, 2, dest, src);
12342 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
/* Fold a __strncat_chk call with arguments DEST, SRC, LEN and SIZE.
   SRC == "" folds to DEST + evaluation of LEN; LEN == 0 folds to DEST
   + evaluation of SRC.  When LEN is provably >= strlen (SRC) the call
   lowers to __strcat_chk (the n-bound is then irrelevant); with an
   unknown object size it lowers to plain strncat.
   Fix: the argument validation previously tested
   validate_arg (size, INTEGER_TYPE) twice and never validated LEN;
   the second check now validates LEN as intended.
   NOTE(review): interior lines are elided in this extraction.  */
12346 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12347 tree dest, tree src, tree len, tree size)
12352 if (!validate_arg (dest, POINTER_TYPE)
12353 || !validate_arg (src, POINTER_TYPE)
12354 || !validate_arg (size, INTEGER_TYPE)
12355 || !validate_arg (len, INTEGER_TYPE))
12358 p = c_getstr (src);
12359 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12360 if (p && *p == '\0')
12361 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12362 else if (integer_zerop (len))
12363 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12365 if (! host_integerp (size, 1))
12368 if (! integer_all_onesp (size))
12370 tree src_len = c_strlen (src, 1);
12372 && host_integerp (src_len, 1)
12373 && host_integerp (len, 1)
12374 && ! tree_int_cst_lt (len, src_len))
12376 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12377 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12381 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12386 /* If __builtin_strncat_chk is used, assume strncat is available. */
12387 fn = built_in_decls[BUILT_IN_STRNCAT];
12391 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12394 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12395 a normal call should be emitted rather than expanding the function
12396 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
/* Fold a __{,v}sprintf_chk call EXP.  The output length is computable
   for a literal format without '%' or for "%s" with a literal string
   argument; when that length is proven < SIZE, and FLAG is zero (or
   the format needs no checking), the call is rewritten to plain
   {,v}sprintf with the flag/size arguments dropped.  Returns NULL_TREE
   when a normal call should be emitted instead.
   NOTE(review): interior lines are elided in this extraction.  */
12399 fold_builtin_sprintf_chk (location_t loc, tree exp,
12400 enum built_in_function fcode)
12402 tree dest, size, len, fn, fmt, flag;
12403 const char *fmt_str;
12404 int nargs = call_expr_nargs (exp);
12406 /* Verify the required arguments in the original call. */
12409 dest = CALL_EXPR_ARG (exp, 0);
12410 if (!validate_arg (dest, POINTER_TYPE))
12412 flag = CALL_EXPR_ARG (exp, 1);
12413 if (!validate_arg (flag, INTEGER_TYPE))
12415 size = CALL_EXPR_ARG (exp, 2);
12416 if (!validate_arg (size, INTEGER_TYPE))
12418 fmt = CALL_EXPR_ARG (exp, 3);
12419 if (!validate_arg (fmt, POINTER_TYPE))
12422 if (! host_integerp (size, 1))
12427 if (!init_target_chars ())
12430 /* Check whether the format is a literal string constant. */
12431 fmt_str = c_getstr (fmt);
12432 if (fmt_str != NULL)
12434 /* If the format doesn't contain % args or %%, we know the size. */
12435 if (strchr (fmt_str, target_percent) == 0)
12437 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12438 len = build_int_cstu (size_type_node, strlen (fmt_str));
12440 /* If the format is "%s" and first ... argument is a string literal,
12441 we know the size too. */
12442 else if (fcode == BUILT_IN_SPRINTF_CHK
12443 && strcmp (fmt_str, target_percent_s) == 0)
12449 arg = CALL_EXPR_ARG (exp, 4);
12450 if (validate_arg (arg, POINTER_TYPE))
12452 len = c_strlen (arg, 1);
12453 if (! len || ! host_integerp (len, 1))
12460 if (! integer_all_onesp (size))
12462 if (! len || ! tree_int_cst_lt (len, size))
12466 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12467 or if format doesn't contain % chars or is "%s". */
12468 if (! integer_zerop (flag))
12470 if (fmt_str == NULL)
12472 if (strchr (fmt_str, target_percent) != NULL
12473 && strcmp (fmt_str, target_percent_s))
12477 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12478 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12479 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12483 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12486 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12487 a normal call should be emitted rather than expanding the function
12488 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12489 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12490 passed as second argument. */
/* Fold a __{,v}snprintf_chk call EXP.  MAXLEN, if non-NULL, bounds the
   LEN argument when it is not constant.  Once SIZE is proven >= LEN,
   and FLAG is zero (or the literal format needs no checking), the call
   is rewritten to plain {,v}snprintf with the flag/size arguments
   dropped.  Returns NULL_TREE when a normal call should be emitted.
   NOTE(review): interior lines are elided in this extraction.  */
12493 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12494 enum built_in_function fcode)
12496 tree dest, size, len, fn, fmt, flag;
12497 const char *fmt_str;
12499 /* Verify the required arguments in the original call. */
12500 if (call_expr_nargs (exp) < 5)
12502 dest = CALL_EXPR_ARG (exp, 0);
12503 if (!validate_arg (dest, POINTER_TYPE))
12505 len = CALL_EXPR_ARG (exp, 1);
12506 if (!validate_arg (len, INTEGER_TYPE))
12508 flag = CALL_EXPR_ARG (exp, 2);
12509 if (!validate_arg (flag, INTEGER_TYPE))
12511 size = CALL_EXPR_ARG (exp, 3);
12512 if (!validate_arg (size, INTEGER_TYPE))
12514 fmt = CALL_EXPR_ARG (exp, 4);
12515 if (!validate_arg (fmt, POINTER_TYPE))
12518 if (! host_integerp (size, 1))
12521 if (! integer_all_onesp (size))
12523 if (! host_integerp (len, 1))
12525 /* If LEN is not constant, try MAXLEN too.
12526 For MAXLEN only allow optimizing into non-_ocs function
12527 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12528 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12534 if (tree_int_cst_lt (size, maxlen))
12538 if (!init_target_chars ())
12541 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12542 or if format doesn't contain % chars or is "%s". */
12543 if (! integer_zerop (flag))
12545 fmt_str = c_getstr (fmt);
12546 if (fmt_str == NULL)
12548 if (strchr (fmt_str, target_percent) != NULL
12549 && strcmp (fmt_str, target_percent_s))
12553 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12555 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12556 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12560 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12563 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12564 FMT and ARG are the arguments to the call; we don't fold cases with
12565 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12567 Return NULL_TREE if no simplification was possible, otherwise return the
12568 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12569 code of the function to be simplified. */
/* Fold {,v}printf{,_unlocked}/__{,v}printf_chk calls whose return
   value is ignored.  With a literal format: printf("") is deleted,
   printf("c") becomes putchar('c'), printf("str\n") becomes
   puts("str"), printf("%s\n", arg) becomes puts(arg), and
   printf("%c", arg) becomes putchar(arg); a "%s" format with a known
   literal argument is handled through the same putchar/puts paths.
   The va_list variants only take the "%s"-with-literal route.  The
   result is converted to the original function's return type.
   NOTE(review): interior lines are elided in this extraction.  */
12572 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12573 tree arg, bool ignore,
12574 enum built_in_function fcode)
12576 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12577 const char *fmt_str = NULL;
12579 /* If the return value is used, don't do the transformation. */
12583 /* Verify the required arguments in the original call. */
12584 if (!validate_arg (fmt, POINTER_TYPE))
12587 /* Check whether the format is a literal string constant. */
12588 fmt_str = c_getstr (fmt);
12589 if (fmt_str == NULL)
12592 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12594 /* If we're using an unlocked function, assume the other
12595 unlocked functions exist explicitly. */
12596 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12597 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12601 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12602 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12605 if (!init_target_chars ())
12608 if (strcmp (fmt_str, target_percent_s) == 0
12609 || strchr (fmt_str, target_percent) == NULL)
12613 if (strcmp (fmt_str, target_percent_s) == 0)
12615 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12618 if (!arg || !validate_arg (arg, POINTER_TYPE))
12621 str = c_getstr (arg);
12627 /* The format specifier doesn't contain any '%' characters. */
12628 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12634 /* If the string was "", printf does nothing. */
12635 if (str[0] == '\0')
12636 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12638 /* If the string has length of 1, call putchar. */
12639 if (str[1] == '\0')
12641 /* Given printf("c"), (where c is any one character,)
12642 convert "c"[0] to an int and pass that to the replacement
12644 newarg = build_int_cst (NULL_TREE, str[0]);
12646 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12650 /* If the string was "string\n", call puts("string"). */
12651 size_t len = strlen (str);
12652 if ((unsigned char)str[len - 1] == target_newline)
12654 /* Create a NUL-terminated string that's one char shorter
12655 than the original, stripping off the trailing '\n'. */
12656 char *newstr = XALLOCAVEC (char, len);
12657 memcpy (newstr, str, len - 1);
12658 newstr[len - 1] = 0;
12660 newarg = build_string_literal (len, newstr);
12662 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12665 /* We'd like to arrange to call fputs(string,stdout) here,
12666 but we need stdout and don't have a way to get it yet. */
12671 /* The other optimizations can be done only on the non-va_list variants. */
12672 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12675 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12676 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12678 if (!arg || !validate_arg (arg, POINTER_TYPE))
12681 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12684 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12685 else if (strcmp (fmt_str, target_percent_c) == 0)
12687 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12690 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12696 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12699 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12700 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12701 more than 3 arguments, and ARG may be null in the 2-argument case.
12703 Return NULL_TREE if no simplification was possible, otherwise return the
12704 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12705 code of the function to be simplified. */
12708 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12709 tree fmt, tree arg, bool ignore,
12710 enum built_in_function fcode)
12712 tree fn_fputc, fn_fputs, call = NULL_TREE;
12713 const char *fmt_str = NULL;
12715 /* If the return value is used, don't do the transformation. */
12719 /* Verify the required arguments in the original call. */
12720 if (!validate_arg (fp, POINTER_TYPE))
12722 if (!validate_arg (fmt, POINTER_TYPE))
12725 /* Check whether the format is a literal string constant. */
12726 fmt_str = c_getstr (fmt);
12727 if (fmt_str == NULL)
/* NOTE(review): non-constant format string -- the bail-out return between
   the checks above is not visible in this elided listing.  */
12730 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12732 /* If we're using an unlocked function, assume the other
12733 unlocked functions exist explicitly. */
12734 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12735 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12739 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12740 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
/* Cache the target-charset forms of '%', 'c', 's' and '\n' (see
   init_target_chars below); give up if any is unrepresentable.  */
12743 if (!init_target_chars ())
12746 /* If the format doesn't contain % args or %%, use strcpy. */
12747 if (strchr (fmt_str, target_percent) == NULL)
12749 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12753 /* If the format specifier was "", fprintf does nothing. */
12754 if (fmt_str[0] == '\0')
12756 /* If FP has side-effects, just wait until gimplification is
12758 if (TREE_SIDE_EFFECTS (fp))
12761 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12764 /* When "string" doesn't contain %, replace all cases of
12765 fprintf (fp, string) with fputs (string, fp). The fputs
12766 builtin will take care of special cases like length == 1. */
12768 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12771 /* The other optimizations can be done only on the non-va_list variants. */
12772 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12775 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12776 else if (strcmp (fmt_str, target_percent_s) == 0)
12778 if (!arg || !validate_arg (arg, POINTER_TYPE))
12781 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12784 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12785 else if (strcmp (fmt_str, target_percent_c) == 0)
12787 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12790 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
/* Convert the replacement call's value to the original return type.  */
12795 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12798 /* Initialize format string characters in the target charset. */
12801 init_target_chars (void)
/* Translate the host characters used by the printf/fprintf folders into
   the target's execution character set via the language hook.  */
12806 target_newline = lang_hooks.to_target_charset ('\n');
12807 target_percent = lang_hooks.to_target_charset ('%');
12808 target_c = lang_hooks.to_target_charset ('c');
12809 target_s = lang_hooks.to_target_charset ('s');
/* A zero result means the character is not representable on the target;
   the failure return here is elided in this listing.  */
12810 if (target_newline == 0 || target_percent == 0 || target_c == 0
/* Build the cached comparison strings "%c", "%s" and "%s\n" that the
   printf-family folders match format strings against.  */
12814 target_percent_c[0] = target_percent;
12815 target_percent_c[1] = target_c;
12816 target_percent_c[2] = '\0';
12818 target_percent_s[0] = target_percent;
12819 target_percent_s[1] = target_s;
12820 target_percent_s[2] = '\0';
12822 target_percent_s_newline[0] = target_percent;
12823 target_percent_s_newline[1] = target_s;
12824 target_percent_s_newline[2] = target_newline;
12825 target_percent_s_newline[3] = '\0';
12832 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12833 and no overflow/underflow occurred. INEXACT is true if M was not
12834 exactly calculated. TYPE is the tree type for the result. This
12835 function assumes that you cleared the MPFR flags and then
12836 calculated M to see if anything subsequently set a flag prior to
12837 entering this function. Return NULL_TREE if any checks fail. */
12840 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12842 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12843 overflow/underflow occurred. If -frounding-math, proceed iff the
12844 result of calling FUNC was exact. */
12845 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12846 && (!flag_rounding_math || !inexact))
12848 REAL_VALUE_TYPE rr;
12850 real_from_mpfr (&rr, m, type, GMP_RNDN);
12851 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12852 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12853 but the mpft_t is not, then we underflowed in the
12855 if (real_isfinite (&rr)
12856 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12858 REAL_VALUE_TYPE rmode;
/* Round RR into TYPE's machine mode; only accept the result if the
   round-trip is lossless (i.e. the mode can represent it exactly).  */
12860 real_convert (&rmode, TYPE_MODE (type), &rr);
12861 /* Proceed iff the specified mode can hold the value. */
12862 if (real_identical (&rmode, &rr))
12863 return build_real (type, rmode);
12869 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12870 number and no overflow/underflow occurred. INEXACT is true if M
12871 was not exactly calculated. TYPE is the tree type for the result.
12872 This function assumes that you cleared the MPFR flags and then
12873 calculated M to see if anything subsequently set a flag prior to
12874 entering this function. Return NULL_TREE if any checks fail, if
12875 FORCE_CONVERT is true, then bypass the checks. */
12878 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12880 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12881 overflow/underflow occurred. If -frounding-math, proceed iff the
12882 result of calling FUNC was exact. */
/* NOTE(review): the "force_convert ||" arm of this condition (line 12883)
   is elided from this listing; each check below is bypassed when
   FORCE_CONVERT is nonzero.  */
12884 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12885 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12886 && (!flag_rounding_math || !inexact)))
12888 REAL_VALUE_TYPE re, im;
12890 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12891 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12892 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12893 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12894 but the mpft_t is not, then we underflowed in the
12897 || (real_isfinite (&re) && real_isfinite (&im)
12898 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12899 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12901 REAL_VALUE_TYPE re_mode, im_mode;
/* Round both parts into the component mode of TYPE and require a
   lossless round-trip, mirroring do_mpfr_ckconv above.  */
12903 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12904 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12905 /* Proceed iff the specified mode can hold the value. */
12907 || (real_identical (&re_mode, &re)
12908 && real_identical (&im_mode, &im)))
12909 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12910 build_real (TREE_TYPE (type), im_mode));
12916 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12917 FUNC on it and return the resulting value as a tree with type TYPE.
12918 If MIN and/or MAX are not NULL, then the supplied ARG must be
12919 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12920 acceptable values, otherwise they are not. The mpfr precision is
12921 set to the precision of TYPE. We assume that function FUNC returns
12922 zero if the result could be calculated exactly within the requested
12926 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12927 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12930 tree result = NULL_TREE;
12934 /* To proceed, MPFR must exactly represent the target floating point
12935 format, which only happens when the target base equals two. */
12936 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12937 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12939 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
/* The argument must be finite and within the optional [MIN, MAX]
   domain; INCLUSIVE selects closed vs. open bounds.  */
12941 if (real_isfinite (ra)
12942 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12943 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12945 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12946 const int prec = fmt->p;
12947 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
/* Compute FUNC at TYPE's precision, then validate/convert the MPFR
   result via do_mpfr_ckconv (flags were cleared just before FUNC).  */
12951 mpfr_init2 (m, prec);
12952 mpfr_from_real (m, ra, GMP_RNDN);
12953 mpfr_clear_flags ();
12954 inexact = func (m, m, rnd);
12955 result = do_mpfr_ckconv (m, type, inexact);
12963 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12964 FUNC on it and return the resulting value as a tree with type TYPE.
12965 The mpfr precision is set to the precision of TYPE. We assume that
12966 function FUNC returns zero if the result could be calculated
12967 exactly within the requested precision. */
12970 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12971 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12973 tree result = NULL_TREE;
12978 /* To proceed, MPFR must exactly represent the target floating point
12979 format, which only happens when the target base equals two. */
12980 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12981 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12982 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12984 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12985 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12987 if (real_isfinite (ra1) && real_isfinite (ra2))
12989 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12990 const int prec = fmt->p;
12991 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
/* Same scheme as do_mpfr_arg1, with two operands: compute FUNC at
   TYPE's precision, then validate via do_mpfr_ckconv.  M1 doubles as
   the result operand.  */
12995 mpfr_inits2 (prec, m1, m2, NULL);
12996 mpfr_from_real (m1, ra1, GMP_RNDN);
12997 mpfr_from_real (m2, ra2, GMP_RNDN);
12998 mpfr_clear_flags ();
12999 inexact = func (m1, m1, m2, rnd);
13000 result = do_mpfr_ckconv (m1, type, inexact);
13001 mpfr_clears (m1, m2, NULL);
13008 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13009 FUNC on it and return the resulting value as a tree with type TYPE.
13010 The mpfr precision is set to the precision of TYPE. We assume that
13011 function FUNC returns zero if the result could be calculated
13012 exactly within the requested precision. */
13015 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13016 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13018 tree result = NULL_TREE;
13024 /* To proceed, MPFR must exactly represent the target floating point
13025 format, which only happens when the target base equals two. */
13026 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13027 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13028 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13029 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13031 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13032 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13033 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13035 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13037 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13038 const int prec = fmt->p;
13039 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
/* Three-operand variant of the do_mpfr_arg1/arg2 scheme (used for
   builtins such as fma); M1 is reused as the result operand.  */
13043 mpfr_inits2 (prec, m1, m2, m3, NULL);
13044 mpfr_from_real (m1, ra1, GMP_RNDN);
13045 mpfr_from_real (m2, ra2, GMP_RNDN);
13046 mpfr_from_real (m3, ra3, GMP_RNDN);
13047 mpfr_clear_flags ();
13048 inexact = func (m1, m1, m2, m3, rnd);
13049 result = do_mpfr_ckconv (m1, type, inexact);
13050 mpfr_clears (m1, m2, m3, NULL);
13057 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13058 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13059 If ARG_SINP and ARG_COSP are NULL then the result is returned
13060 as a complex value.
13061 The type is taken from the type of ARG and is used for setting the
13062 precision of the calculation and results. */
13065 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13067 tree const type = TREE_TYPE (arg);
13068 tree result = NULL_TREE;
13072 /* To proceed, MPFR must exactly represent the target floating point
13073 format, which only happens when the target base equals two. */
13074 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13075 && TREE_CODE (arg) == REAL_CST
13076 && !TREE_OVERFLOW (arg))
13078 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13080 if (real_isfinite (ra))
13082 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13083 const int prec = fmt->p;
13084 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13085 tree result_s, result_c;
/* Compute sin and cos simultaneously; both results must convert
   cleanly (do_mpfr_ckconv) or we fold nothing.  */
13089 mpfr_inits2 (prec, m, ms, mc, NULL);
13090 mpfr_from_real (m, ra, GMP_RNDN);
13091 mpfr_clear_flags ();
13092 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13093 result_s = do_mpfr_ckconv (ms, type, inexact);
13094 result_c = do_mpfr_ckconv (mc, type, inexact);
13095 mpfr_clears (m, ms, mc, NULL);
13096 if (result_s && result_c)
13098 /* If we are to return in a complex value do so. */
/* Real part is the cos result, imaginary part the sin result.  */
13099 if (!arg_sinp && !arg_cosp)
13100 return build_complex (build_complex_type (type),
13101 result_c, result_s);
13103 /* Dereference the sin/cos pointer arguments. */
13104 arg_sinp = build_fold_indirect_ref (arg_sinp);
13105 arg_cosp = build_fold_indirect_ref (arg_cosp);
13106 /* Proceed if valid pointer type were passed in. */
13107 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13108 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13110 /* Set the values. */
/* NOTE(review): the right-hand sides of these MODIFY_EXPRs (lines
   13112/13115) are elided from this listing.  */
13111 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13113 TREE_SIDE_EFFECTS (result_s) = 1;
13114 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13116 TREE_SIDE_EFFECTS (result_c) = 1;
13117 /* Combine the assignments into a compound expr. */
13118 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13119 result_s, result_c));
13127 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13128 two-argument mpfr order N Bessel function FUNC on them and return
13129 the resulting value as a tree with type TYPE. The mpfr precision
13130 is set to the precision of TYPE. We assume that function FUNC
13131 returns zero if the result could be calculated exactly within the
13132 requested precision. */
13134 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13135 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13136 const REAL_VALUE_TYPE *min, bool inclusive)
13138 tree result = NULL_TREE;
13143 /* To proceed, MPFR must exactly represent the target floating point
13144 format, which only happens when the target base equals two. */
13145 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13146 && host_integerp (arg1, 0)
13147 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
/* N is the Bessel order, taken from the integer constant ARG1.  */
13149 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13150 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
/* NOTE(review): an additional conjunct on line 13152 (preceding the
   finiteness test) is elided from this listing.  */
13153 && real_isfinite (ra)
13154 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13156 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13157 const int prec = fmt->p;
13158 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13162 mpfr_init2 (m, prec);
13163 mpfr_from_real (m, ra, GMP_RNDN);
13164 mpfr_clear_flags ();
13165 inexact = func (m, n, m, rnd);
13166 result = do_mpfr_ckconv (m, type, inexact);
13174 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13175 the pointer *(ARG_QUO) and return the result. The type is taken
13176 from the type of ARG0 and is used for setting the precision of the
13177 calculation and results. */
13180 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13182 tree const type = TREE_TYPE (arg0);
13183 tree result = NULL_TREE;
13188 /* To proceed, MPFR must exactly represent the target floating point
13189 format, which only happens when the target base equals two. */
13190 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13191 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13192 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13194 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13195 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13197 if (real_isfinite (ra0) && real_isfinite (ra1))
13199 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13200 const int prec = fmt->p;
13201 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
/* mpfr_remquo yields both the remainder (in M0) and the low bits of
   the quotient (in INTEGER_QUO).  */
13206 mpfr_inits2 (prec, m0, m1, NULL);
13207 mpfr_from_real (m0, ra0, GMP_RNDN);
13208 mpfr_from_real (m1, ra1, GMP_RNDN);
13209 mpfr_clear_flags ();
13210 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13211 /* Remquo is independent of the rounding mode, so pass
13212 inexact=0 to do_mpfr_ckconv(). */
13213 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13214 mpfr_clears (m0, m1, NULL);
13217 /* MPFR calculates quo in the host's long so it may
13218 return more bits in quo than the target int can hold
13219 if sizeof(host long) > sizeof(target int). This can
13220 happen even for native compilers in LP64 mode. In
13221 these cases, modulo the quo value with the largest
13222 number that the target int can hold while leaving one
13223 bit for the sign. */
13224 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13225 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13227 /* Dereference the quo pointer argument. */
13228 arg_quo = build_fold_indirect_ref (arg_quo);
13229 /* Proceed iff a valid pointer type was passed in. */
13230 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13232 /* Set the value. */
13233 tree result_quo = fold_build2 (MODIFY_EXPR,
13234 TREE_TYPE (arg_quo), arg_quo,
13235 build_int_cst (NULL, integer_quo));
13236 TREE_SIDE_EFFECTS (result_quo) = 1;
13237 /* Combine the quo assignment with the rem. */
13238 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13239 result_quo, result_rem));
13247 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13248 resulting value as a tree with type TYPE. The mpfr precision is
13249 set to the precision of TYPE. We assume that this mpfr function
13250 returns zero if the result could be calculated exactly within the
13251 requested precision. In addition, the integer pointer represented
13252 by ARG_SG will be dereferenced and set to the appropriate signgam
13256 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13258 tree result = NULL_TREE;
13262 /* To proceed, MPFR must exactly represent the target floating point
13263 format, which only happens when the target base equals two. Also
13264 verify ARG is a constant and that ARG_SG is an int pointer. */
13265 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13266 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13267 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13268 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13270 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13272 /* In addition to NaN and Inf, the argument cannot be zero or a
13273 negative integer. */
13274 if (real_isfinite (ra)
13275 && ra->cl != rvc_zero
13276 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13278 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13279 const int prec = fmt->p;
13280 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
/* mpfr_lgamma computes ln|gamma(x)| and stores the sign of gamma(x)
   in SG.  */
13285 mpfr_init2 (m, prec);
13286 mpfr_from_real (m, ra, GMP_RNDN);
13287 mpfr_clear_flags ();
13288 inexact = mpfr_lgamma (m, &sg, m, rnd);
13289 result_lg = do_mpfr_ckconv (m, type, inexact);
/* NOTE(review): lines 13290-13294 (cleanup and the success test
   guarding the code below) are elided from this listing.  */
13295 /* Dereference the arg_sg pointer argument. */
13296 arg_sg = build_fold_indirect_ref (arg_sg);
13297 /* Assign the signgam value into *arg_sg. */
13298 result_sg = fold_build2 (MODIFY_EXPR,
13299 TREE_TYPE (arg_sg), arg_sg,
13300 build_int_cst (NULL, sg));
13301 TREE_SIDE_EFFECTS (result_sg) = 1;
13302 /* Combine the signgam assignment with the lgamma result. */
13303 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13304 result_sg, result_lg));
13312 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13313 function FUNC on it and return the resulting value as a tree with
13314 type TYPE. The mpfr precision is set to the precision of TYPE. We
13315 assume that function FUNC returns zero if the result could be
13316 calculated exactly within the requested precision. */
13319 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13321 tree result = NULL_TREE;
13325 /* To proceed, MPFR must exactly represent the target floating point
13326 format, which only happens when the target base equals two. */
13327 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13328 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13329 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13331 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13332 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13334 if (real_isfinite (re) && real_isfinite (im))
13336 const struct real_format *const fmt =
13337 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13338 const int prec = fmt->p;
13339 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
/* CRND applies the same rounding direction to both components.  */
13340 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
/* Complex analogue of do_mpfr_arg1: compute FUNC at the component
   precision, then validate via do_mpc_ckconv.  */
13344 mpc_init2 (m, prec);
13345 mpfr_from_real (mpc_realref(m), re, rnd);
13346 mpfr_from_real (mpc_imagref(m), im, rnd);
13347 mpfr_clear_flags ();
13348 inexact = func (m, m, crnd);
13349 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13357 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13358 mpc function FUNC on it and return the resulting value as a tree
13359 with type TYPE. The mpfr precision is set to the precision of
13360 TYPE. We assume that function FUNC returns zero if the result
13361 could be calculated exactly within the requested precision. If
13362 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13363 in the arguments and/or results. */
13366 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13367 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13369 tree result = NULL_TREE;
13374 /* To proceed, MPFR must exactly represent the target floating point
13375 format, which only happens when the target base equals two. */
13376 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13377 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13378 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13379 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13380 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13382 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13383 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13384 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13385 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
/* NOTE(review): the "do_nonfinite ||" arm of this condition (line
   13387) is elided; finiteness is only required when DO_NONFINITE is
   false.  */
13388 || (real_isfinite (re0) && real_isfinite (im0)
13389 && real_isfinite (re1) && real_isfinite (im1)))
13391 const struct real_format *const fmt =
13392 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13393 const int prec = fmt->p;
13394 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13395 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
/* Two-operand complex fold; M0 is reused as the result operand, and
   DO_NONFINITE is forwarded to do_mpc_ckconv as FORCE_CONVERT.  */
13399 mpc_init2 (m0, prec);
13400 mpc_init2 (m1, prec);
13401 mpfr_from_real (mpc_realref(m0), re0, rnd);
13402 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13403 mpfr_from_real (mpc_realref(m1), re1, rnd);
13404 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13405 mpfr_clear_flags ();
13406 inexact = func (m0, m0, m1, crnd);
13407 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13417 The functions below provide an alternate interface for folding
13418 builtin function calls presented as GIMPLE_CALL statements rather
13419 than as CALL_EXPRs. The folded result is still expressed as a
13420 tree. There is too much code duplication in the handling of
13421 varargs functions, and a more intrusive re-factoring would permit
13422 better sharing of code between the tree and statement-based
13423 versions of these functions. */
13425 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13426 along with N new arguments specified as the "..." parameters. SKIP
13427 is the number of arguments in STMT to be omitted. This function is used
13428 to do varargs-to-varargs transformations. */
13431 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13433 int oldnargs = gimple_call_num_args (stmt);
13434 int nargs = oldnargs - skip + n;
13435 tree fntype = TREE_TYPE (fndecl);
13436 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13440 location_t loc = gimple_location (stmt);
13442 buffer = XALLOCAVEC (tree, nargs);
/* NOTE(review): the va_start/va_end bracketing of this loop is elided
   from this listing.  First copy the N explicit replacement arguments,
   then append STMT's arguments past the first SKIP.  */
13444 for (i = 0; i < n; i++)
13445 buffer[i] = va_arg (ap, tree);
13447 for (j = skip; j < oldnargs; j++, i++)
13448 buffer[i] = gimple_call_arg (stmt, j);
13450 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13453 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13454 a normal call should be emitted rather than expanding the function
13455 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13458 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13460 tree dest, size, len, fn, fmt, flag;
13461 const char *fmt_str;
13462 int nargs = gimple_call_num_args (stmt);
13464 /* Verify the required arguments in the original call. */
/* Argument layout: (dest, flag, size, fmt, ...).  */
13467 dest = gimple_call_arg (stmt, 0);
13468 if (!validate_arg (dest, POINTER_TYPE))
13470 flag = gimple_call_arg (stmt, 1);
13471 if (!validate_arg (flag, INTEGER_TYPE))
13473 size = gimple_call_arg (stmt, 2);
13474 if (!validate_arg (size, INTEGER_TYPE))
13476 fmt = gimple_call_arg (stmt, 3);
13477 if (!validate_arg (fmt, POINTER_TYPE))
13480 if (! host_integerp (size, 1))
13485 if (!init_target_chars ())
13488 /* Check whether the format is a literal string constant. */
13489 fmt_str = c_getstr (fmt);
13490 if (fmt_str != NULL)
13492 /* If the format doesn't contain % args or %%, we know the size. */
13493 if (strchr (fmt_str, target_percent) == 0)
13495 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13496 len = build_int_cstu (size_type_node, strlen (fmt_str));
13498 /* If the format is "%s" and first ... argument is a string literal,
13499 we know the size too. */
13500 else if (fcode == BUILT_IN_SPRINTF_CHK
13501 && strcmp (fmt_str, target_percent_s) == 0)
13507 arg = gimple_call_arg (stmt, 4);
13508 if (validate_arg (arg, POINTER_TYPE))
13510 len = c_strlen (arg, 1);
13511 if (! len || ! host_integerp (len, 1))
/* Unless SIZE is unlimited (all-ones), the known LEN must fit.  */
13518 if (! integer_all_onesp (size))
13520 if (! len || ! tree_int_cst_lt (len, size))
13524 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13525 or if format doesn't contain % chars or is "%s". */
13526 if (! integer_zerop (flag))
13528 if (fmt_str == NULL)
13530 if (strchr (fmt_str, target_percent) != NULL
13531 && strcmp (fmt_str, target_percent_s))
13535 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13536 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13537 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
/* Drop the first 4 checked arguments; keep the trailing varargs.  */
13541 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13544 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13545 a normal call should be emitted rather than expanding the function
13546 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13547 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13548 passed as second argument. */
13551 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13552 enum built_in_function fcode)
13554 tree dest, size, len, fn, fmt, flag;
13555 const char *fmt_str;
13557 /* Verify the required arguments in the original call. */
/* Argument layout: (dest, len, flag, size, fmt, ...).  */
13558 if (gimple_call_num_args (stmt) < 5)
13560 dest = gimple_call_arg (stmt, 0);
13561 if (!validate_arg (dest, POINTER_TYPE))
13563 len = gimple_call_arg (stmt, 1);
13564 if (!validate_arg (len, INTEGER_TYPE))
13566 flag = gimple_call_arg (stmt, 2);
13567 if (!validate_arg (flag, INTEGER_TYPE))
13569 size = gimple_call_arg (stmt, 3);
13570 if (!validate_arg (size, INTEGER_TYPE))
13572 fmt = gimple_call_arg (stmt, 4);
13573 if (!validate_arg (fmt, POINTER_TYPE))
13576 if (! host_integerp (size, 1))
13579 if (! integer_all_onesp (size))
13581 if (! host_integerp (len, 1))
13583 /* If LEN is not constant, try MAXLEN too.
13584 For MAXLEN only allow optimizing into non-_ocs function
13585 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13586 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13592 if (tree_int_cst_lt (size, maxlen))
13596 if (!init_target_chars ())
13599 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13600 or if format doesn't contain % chars or is "%s". */
13601 if (! integer_zerop (flag))
13603 fmt_str = c_getstr (fmt);
13604 if (fmt_str == NULL)
13606 if (strchr (fmt_str, target_percent) != NULL
13607 && strcmp (fmt_str, target_percent_s))
13611 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13613 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13614 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
/* Drop the 5 checked arguments; keep the trailing varargs.  */
13618 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13621 /* Builtins with folding operations that operate on "..." arguments
13622 need special handling; we need to store the arguments in a convenient
13623 data structure before attempting any folding. Fortunately there are
13624 only a few builtins that fall into this category. FNDECL is the
13625 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13626 result of the function call is ignored. */
13629 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13630 bool ignore ATTRIBUTE_UNUSED)
13632 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13633 tree ret = NULL_TREE;
/* Dispatch on the builtin code; only the *printf_chk family needs
   varargs-aware folding here.  */
13637 case BUILT_IN_SPRINTF_CHK:
13638 case BUILT_IN_VSPRINTF_CHK:
13639 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13642 case BUILT_IN_SNPRINTF_CHK:
13643 case BUILT_IN_VSNPRINTF_CHK:
13644 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
/* Wrap a successful fold in a NOP_EXPR and suppress warnings, matching
   what fold_call_stmt expects to unwrap.  */
13651 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13652 TREE_NO_WARNING (ret) = 1;
13658 /* A wrapper function for builtin folding that prevents warnings for
13659 "statement without effect" and the like, caused by removing the
13660 call node earlier than the warning is generated. */
13663 fold_call_stmt (gimple stmt, bool ignore)
13665 tree ret = NULL_TREE;
13666 tree fndecl = gimple_call_fndecl (stmt);
13667 location_t loc = gimple_location (stmt);
/* Only attempt folding for genuine builtins; never fold calls whose
   argument list contains __builtin_va_arg_pack.  */
13669 && TREE_CODE (fndecl) == FUNCTION_DECL
13670 && DECL_BUILT_IN (fndecl)
13671 && !gimple_call_va_arg_pack_p (stmt))
13673 int nargs = gimple_call_num_args (stmt);
13675 if (avoid_folding_inline_builtin (fndecl))
/* Machine-specific builtins are delegated to the target hook.  */
13677 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13679 return targetm.fold_builtin (fndecl, nargs,
13681 ? gimple_call_arg_ptr (stmt, 0)
13682 : &error_mark_node), ignore);
/* Fixed-arity builtins go through fold_builtin_n; anything longer is
   handled by the varargs-aware path.  */
13686 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13688 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13690 for (i = 0; i < nargs; i++)
13691 args[i] = gimple_call_arg (stmt, i);
13692 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13695 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13698 /* Propagate location information from original call to
13699 expansion of builtin. Otherwise things like
13700 maybe_emit_chk_warning, that operate on the expansion
13701 of a builtin, will use the wrong location information. */
13702 if (gimple_has_location (stmt))
13704 tree realret = ret;
13705 if (TREE_CODE (ret) == NOP_EXPR)
13706 realret = TREE_OPERAND (ret, 0);
13707 if (CAN_HAVE_LOCATION_P (realret)
13708 && !EXPR_HAS_LOCATION (realret))
13709 SET_EXPR_LOCATION (realret, loc);
13719 /* Look up the function in built_in_decls that corresponds to DECL
13720 and set ASMSPEC as its user assembler name. DECL must be a
13721 function decl that declares a builtin. */
13724 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13727 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13728 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13731 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13732 set_user_assembler_name (builtin, asmspec);
13733 switch (DECL_FUNCTION_CODE (decl))
13735 case BUILT_IN_MEMCPY:
13736 init_block_move_fn (asmspec);
13737 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13739 case BUILT_IN_MEMSET:
13740 init_block_clear_fn (asmspec);
13741 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13743 case BUILT_IN_MEMMOVE:
13744 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13746 case BUILT_IN_MEMCMP:
13747 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13749 case BUILT_IN_ABORT:
13750 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13753 if (INT_TYPE_SIZE < BITS_PER_WORD)
13755 set_user_assembler_libfunc ("ffs", asmspec);
13756 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13757 MODE_INT, 0), "ffs");