1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
68 const char * built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_args_info (tree);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
228 /* Return true if NAME starts with __builtin_ or __sync_. */
231 is_builtin_name (const char *name)
233 if (strncmp (name, "__builtin_", 10) == 0)
235 if (strncmp (name, "__sync_", 7) == 0)
241 /* Return true if DECL is a function symbol representing a built-in. */
244 is_builtin_fn (tree decl)
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
255 called_as_built_in (tree node)
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
260 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
261 return is_builtin_name (name);
264 /* Return the alignment in bits of EXP, an object.
265 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
266 guessed alignment e.g. from type alignment. */
269 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
274 if (handled_component_p (exp))
276 HOST_WIDE_INT bitsize, bitpos;
278 enum machine_mode mode;
279 int unsignedp, volatilep;
281 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
282 &mode, &unsignedp, &volatilep, true);
284 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
289 if (TREE_CODE (offset) == PLUS_EXPR)
291 next_offset = TREE_OPERAND (offset, 0);
292 offset = TREE_OPERAND (offset, 1);
296 if (host_integerp (offset, 1))
298 /* Any overflow in calculating offset_bits won't change
301 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
304 inner = MIN (inner, (offset_bits & -offset_bits));
306 else if (TREE_CODE (offset) == MULT_EXPR
307 && host_integerp (TREE_OPERAND (offset, 1), 1))
309 /* Any overflow in calculating offset_factor won't change
311 unsigned offset_factor
312 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
316 inner = MIN (inner, (offset_factor & -offset_factor));
320 inner = MIN (inner, BITS_PER_UNIT);
323 offset = next_offset;
326 if (TREE_CODE (exp) == CONST_DECL)
327 exp = DECL_INITIAL (exp);
329 && TREE_CODE (exp) != LABEL_DECL)
330 align = MIN (inner, DECL_ALIGN (exp));
331 #ifdef CONSTANT_ALIGNMENT
332 else if (CONSTANT_CLASS_P (exp))
333 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
335 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
336 || TREE_CODE (exp) == INDIRECT_REF)
337 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
339 align = MIN (align, inner);
340 return MIN (align, max_align);
343 /* Returns true iff we can trust that alignment information has been
344 calculated properly. */
347 can_trust_pointer_alignment (void)
349 /* We rely on TER to compute accurate alignment information. */
350 return (optimize && flag_tree_ter);
353 /* Return the alignment in bits of EXP, a pointer valued expression.
354 But don't return more than MAX_ALIGN no matter what.
355 The alignment returned is, by default, the alignment of the thing that
356 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
358 Otherwise, look at the expression to see if we can do better, i.e., if the
359 expression is actually pointing at an object whose alignment is tighter. */
362 get_pointer_alignment (tree exp, unsigned int max_align)
364 unsigned int align, inner;
366 if (!can_trust_pointer_alignment ())
369 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
372 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
373 align = MIN (align, max_align);
377 switch (TREE_CODE (exp))
380 exp = TREE_OPERAND (exp, 0);
381 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
384 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
385 align = MIN (inner, max_align);
388 case POINTER_PLUS_EXPR:
389 /* If sum of pointer + int, restrict our maximum alignment to that
390 imposed by the integer. If not, we can't do any better than
392 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
395 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
396 & (max_align / BITS_PER_UNIT - 1))
400 exp = TREE_OPERAND (exp, 0);
404 /* See what we are pointing at and look at its alignment. */
405 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
413 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
414 way, because it could contain a zero byte in the middle.
415 TREE_STRING_LENGTH is the size of the character array, not the string.
417 ONLY_VALUE should be nonzero if the result is not going to be emitted
418 into the instruction stream and zero if it is going to be expanded.
419 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
420 is returned, otherwise NULL, since
421 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
422 evaluate the side-effects.
424 The value returned is of type `ssizetype'.
426 Unfortunately, string_constant can't access the values of const char
427 arrays with initializers, so neither can we do so here. */
430 c_strlen (tree src, int only_value)
433 HOST_WIDE_INT offset;
439 if (TREE_CODE (src) == COND_EXPR
440 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
444 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
445 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
446 if (tree_int_cst_equal (len1, len2))
450 if (TREE_CODE (src) == COMPOUND_EXPR
451 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
452 return c_strlen (TREE_OPERAND (src, 1), only_value);
454 if (EXPR_HAS_LOCATION (src))
455 loc = EXPR_LOCATION (src);
457 loc = input_location;
459 src = string_constant (src, &offset_node);
463 max = TREE_STRING_LENGTH (src) - 1;
464 ptr = TREE_STRING_POINTER (src);
466 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
468 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
469 compute the offset to the following null if we don't know where to
470 start searching for it. */
473 for (i = 0; i < max; i++)
477 /* We don't know the starting offset, but we do know that the string
478 has no internal zero bytes. We can assume that the offset falls
479 within the bounds of the string; otherwise, the programmer deserves
480 what he gets. Subtract the offset from the length of the string,
481 and return that. This would perhaps not be valid if we were dealing
482 with named arrays in addition to literal string constants. */
484 return size_diffop_loc (loc, size_int (max), offset_node);
487 /* We have a known offset into the string. Start searching there for
488 a null character if we can represent it as a single HOST_WIDE_INT. */
489 if (offset_node == 0)
491 else if (! host_integerp (offset_node, 0))
494 offset = tree_low_cst (offset_node, 0);
496 /* If the offset is known to be out of bounds, warn, and call strlen at
498 if (offset < 0 || offset > max)
500 /* Suppress multiple warnings for propagated constant strings. */
501 if (! TREE_NO_WARNING (src))
503 warning_at (loc, 0, "offset outside bounds of constant string");
504 TREE_NO_WARNING (src) = 1;
509 /* Use strlen to search for the first zero byte. Since any strings
510 constructed with build_string will have nulls appended, we win even
511 if we get handed something like (char[4])"abcd".
513 Since OFFSET is our starting index into the string, no further
514 calculation is needed. */
515 return ssize_int (strlen (ptr + offset));
518 /* Return a char pointer for a C string if it is a string constant
519 or sum of string constant and integer constant. */
526 src = string_constant (src, &offset_node);
530 if (offset_node == 0)
531 return TREE_STRING_POINTER (src);
532 else if (!host_integerp (offset_node, 1)
533 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
536 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
539 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
540 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
543 c_readstr (const char *str, enum machine_mode mode)
549 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
554 for (i = 0; i < GET_MODE_SIZE (mode); i++)
557 if (WORDS_BIG_ENDIAN)
558 j = GET_MODE_SIZE (mode) - i - 1;
559 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
560 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
561 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
563 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
566 ch = (unsigned char) str[i];
567 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
569 return immed_double_const (c[0], c[1], mode);
572 /* Cast a target constant CST to target CHAR and if that value fits into
573 host char type, return zero and put that value into variable pointed to by
577 target_char_cast (tree cst, char *p)
579 unsigned HOST_WIDE_INT val, hostval;
581 if (!host_integerp (cst, 1)
582 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
585 val = tree_low_cst (cst, 1);
586 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
587 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
590 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
591 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
600 /* Similar to save_expr, but assumes that arbitrary code is not executed
601 in between the multiple evaluations. In particular, we assume that a
602 non-addressable local variable will not be modified. */
605 builtin_save_expr (tree exp)
607 if (TREE_ADDRESSABLE (exp) == 0
608 && (TREE_CODE (exp) == PARM_DECL
609 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
612 return save_expr (exp);
615 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
616 times to get the address of either a higher stack frame, or a return
617 address located within it (depending on FNDECL_CODE). */
620 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
624 #ifdef INITIAL_FRAME_ADDRESS_RTX
625 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
629 /* For a zero count with __builtin_return_address, we don't care what
630 frame address we return, because target-specific definitions will
631 override us. Therefore frame pointer elimination is OK, and using
632 the soft frame pointer is OK.
634 For a nonzero count, or a zero count with __builtin_frame_address,
635 we require a stable offset from the current frame pointer to the
636 previous one, so we must use the hard frame pointer, and
637 we must disable frame pointer elimination. */
638 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
639 tem = frame_pointer_rtx;
642 tem = hard_frame_pointer_rtx;
644 /* Tell reload not to eliminate the frame pointer. */
645 crtl->accesses_prior_frames = 1;
649 /* Some machines need special handling before we can access
650 arbitrary frames. For example, on the SPARC, we must first flush
651 all register windows to the stack. */
652 #ifdef SETUP_FRAME_ADDRESSES
654 SETUP_FRAME_ADDRESSES ();
657 /* On the SPARC, the return address is not in the frame, it is in a
658 register. There is no way to access it off of the current frame
659 pointer, but it can be accessed off the previous frame pointer by
660 reading the value from the register window save area. */
661 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
662 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
666 /* Scan back COUNT frames to the specified frame. */
667 for (i = 0; i < count; i++)
669 /* Assume the dynamic chain pointer is in the word that the
670 frame address points to, unless otherwise specified. */
671 #ifdef DYNAMIC_CHAIN_ADDRESS
672 tem = DYNAMIC_CHAIN_ADDRESS (tem);
674 tem = memory_address (Pmode, tem);
675 tem = gen_frame_mem (Pmode, tem);
676 tem = copy_to_reg (tem);
679 /* For __builtin_frame_address, return what we've got. But, on
680 the SPARC for example, we may have to add a bias. */
681 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
682 #ifdef FRAME_ADDR_RTX
683 return FRAME_ADDR_RTX (tem);
688 /* For __builtin_return_address, get the return address from that frame. */
689 #ifdef RETURN_ADDR_RTX
690 tem = RETURN_ADDR_RTX (count, tem);
692 tem = memory_address (Pmode,
693 plus_constant (tem, GET_MODE_SIZE (Pmode)));
694 tem = gen_frame_mem (Pmode, tem);
699 /* Alias set used for setjmp buffer. */
700 static alias_set_type setjmp_alias_set = -1;
702 /* Construct the leading half of a __builtin_setjmp call. Control will
703 return to RECEIVER_LABEL. This is also called directly by the SJLJ
704 exception handling code. */
707 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
709 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
713 if (setjmp_alias_set == -1)
714 setjmp_alias_set = new_alias_set ();
716 buf_addr = convert_memory_address (Pmode, buf_addr);
718 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
720 /* We store the frame pointer and the address of receiver_label in
721 the buffer and use the rest of it for the stack save area, which
722 is machine-dependent. */
724 mem = gen_rtx_MEM (Pmode, buf_addr);
725 set_mem_alias_set (mem, setjmp_alias_set);
726 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
728 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
729 set_mem_alias_set (mem, setjmp_alias_set);
731 emit_move_insn (validize_mem (mem),
732 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
734 stack_save = gen_rtx_MEM (sa_mode,
735 plus_constant (buf_addr,
736 2 * GET_MODE_SIZE (Pmode)));
737 set_mem_alias_set (stack_save, setjmp_alias_set);
738 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
740 /* If there is further processing to do, do it. */
741 #ifdef HAVE_builtin_setjmp_setup
742 if (HAVE_builtin_setjmp_setup)
743 emit_insn (gen_builtin_setjmp_setup (buf_addr));
746 /* Tell optimize_save_area_alloca that extra work is going to
747 need to go on during alloca. */
748 cfun->calls_setjmp = 1;
750 /* We have a nonlocal label. */
751 cfun->has_nonlocal_label = 1;
754 /* Construct the trailing part of a __builtin_setjmp call. This is
755 also called directly by the SJLJ exception handling code. */
758 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
762 /* Clobber the FP when we get here, so we have to make sure it's
763 marked as used by this function. */
764 emit_use (hard_frame_pointer_rtx);
766 /* Mark the static chain as clobbered here so life information
767 doesn't get messed up for it. */
768 chain = targetm.calls.static_chain (current_function_decl, true);
769 if (chain && REG_P (chain))
770 emit_clobber (chain);
772 /* Now put in the code to restore the frame pointer, and argument
773 pointer, if needed. */
774 #ifdef HAVE_nonlocal_goto
775 if (! HAVE_nonlocal_goto)
778 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
779 /* This might change the hard frame pointer in ways that aren't
780 apparent to early optimization passes, so force a clobber. */
781 emit_clobber (hard_frame_pointer_rtx);
784 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
785 if (fixed_regs[ARG_POINTER_REGNUM])
787 #ifdef ELIMINABLE_REGS
789 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
791 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
792 if (elim_regs[i].from == ARG_POINTER_REGNUM
793 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
796 if (i == ARRAY_SIZE (elim_regs))
799 /* Now restore our arg pointer from the address at which it
800 was saved in our stack frame. */
801 emit_move_insn (crtl->args.internal_arg_pointer,
802 copy_to_reg (get_arg_pointer_save_area ()));
807 #ifdef HAVE_builtin_setjmp_receiver
808 if (HAVE_builtin_setjmp_receiver)
809 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
812 #ifdef HAVE_nonlocal_goto_receiver
813 if (HAVE_nonlocal_goto_receiver)
814 emit_insn (gen_nonlocal_goto_receiver ());
819 /* We must not allow the code we just generated to be reordered by
820 scheduling. Specifically, the update of the frame pointer must
821 happen immediately, not later. */
822 emit_insn (gen_blockage ());
825 /* __builtin_longjmp is passed a pointer to an array of five words (not
826 all will be used on all machines). It operates similarly to the C
827 library function of the same name, but is more efficient. Much of
828 the code below is copied from the handling of non-local gotos. */
831 expand_builtin_longjmp (rtx buf_addr, rtx value)
833 rtx fp, lab, stack, insn, last;
834 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
836 /* DRAP is needed for stack realign if longjmp is expanded to current
838 if (SUPPORTS_STACK_ALIGNMENT)
839 crtl->need_drap = true;
841 if (setjmp_alias_set == -1)
842 setjmp_alias_set = new_alias_set ();
844 buf_addr = convert_memory_address (Pmode, buf_addr);
846 buf_addr = force_reg (Pmode, buf_addr);
848 /* We require that the user must pass a second argument of 1, because
849 that is what builtin_setjmp will return. */
850 gcc_assert (value == const1_rtx);
852 last = get_last_insn ();
853 #ifdef HAVE_builtin_longjmp
854 if (HAVE_builtin_longjmp)
855 emit_insn (gen_builtin_longjmp (buf_addr));
859 fp = gen_rtx_MEM (Pmode, buf_addr);
860 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
861 GET_MODE_SIZE (Pmode)));
863 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
864 2 * GET_MODE_SIZE (Pmode)));
865 set_mem_alias_set (fp, setjmp_alias_set);
866 set_mem_alias_set (lab, setjmp_alias_set);
867 set_mem_alias_set (stack, setjmp_alias_set);
869 /* Pick up FP, label, and SP from the block and jump. This code is
870 from expand_goto in stmt.c; see there for detailed comments. */
871 #ifdef HAVE_nonlocal_goto
872 if (HAVE_nonlocal_goto)
873 /* We have to pass a value to the nonlocal_goto pattern that will
874 get copied into the static_chain pointer, but it does not matter
875 what that value is, because builtin_setjmp does not use it. */
876 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
880 lab = copy_to_reg (lab);
882 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
883 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
885 emit_move_insn (hard_frame_pointer_rtx, fp);
886 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
888 emit_use (hard_frame_pointer_rtx);
889 emit_use (stack_pointer_rtx);
890 emit_indirect_jump (lab);
894 /* Search backwards and mark the jump insn as a non-local goto.
895 Note that this precludes the use of __builtin_longjmp to a
896 __builtin_setjmp target in the same function. However, we've
897 already cautioned the user that these functions are for
898 internal exception handling use only. */
899 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
901 gcc_assert (insn != last);
905 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
908 else if (CALL_P (insn))
913 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
914 and the address of the save area. */
917 expand_builtin_nonlocal_goto (tree exp)
919 tree t_label, t_save_area;
920 rtx r_label, r_save_area, r_fp, r_sp, insn;
922 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
925 t_label = CALL_EXPR_ARG (exp, 0);
926 t_save_area = CALL_EXPR_ARG (exp, 1);
928 r_label = expand_normal (t_label);
929 r_label = convert_memory_address (Pmode, r_label);
930 r_save_area = expand_normal (t_save_area);
931 r_save_area = convert_memory_address (Pmode, r_save_area);
932 /* Copy the address of the save location to a register just in case it was based
933 on the frame pointer. */
934 r_save_area = copy_to_reg (r_save_area);
935 r_fp = gen_rtx_MEM (Pmode, r_save_area);
936 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
937 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
939 crtl->has_nonlocal_goto = 1;
941 #ifdef HAVE_nonlocal_goto
942 /* ??? We no longer need to pass the static chain value, afaik. */
943 if (HAVE_nonlocal_goto)
944 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
948 r_label = copy_to_reg (r_label);
950 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
951 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
953 /* Restore frame pointer for containing function.
954 This sets the actual hard register used for the frame pointer
955 to the location of the function's incoming static chain info.
956 The non-local goto handler will then adjust it to contain the
957 proper value and reload the argument pointer, if needed. */
958 emit_move_insn (hard_frame_pointer_rtx, r_fp);
959 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
961 /* USE of hard_frame_pointer_rtx added for consistency;
962 not clear if really needed. */
963 emit_use (hard_frame_pointer_rtx);
964 emit_use (stack_pointer_rtx);
966 /* If the architecture is using a GP register, we must
967 conservatively assume that the target function makes use of it.
968 The prologue of functions with nonlocal gotos must therefore
969 initialize the GP register to the appropriate value, and we
970 must then make sure that this value is live at the point
971 of the jump. (Note that this doesn't necessarily apply
972 to targets with a nonlocal_goto pattern; they are free
973 to implement it in their own way. Note also that this is
974 a no-op if the GP register is a global invariant.) */
975 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
976 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
977 emit_use (pic_offset_table_rtx);
979 emit_indirect_jump (r_label);
982 /* Search backwards to the jump insn and mark it as a
984 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
988 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
991 else if (CALL_P (insn))
998 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
999 (not all will be used on all machines) that was passed to __builtin_setjmp.
1000 It updates the stack pointer in that block to correspond to the current
1004 expand_builtin_update_setjmp_buf (rtx buf_addr)
1006 enum machine_mode sa_mode = Pmode;
1010 #ifdef HAVE_save_stack_nonlocal
1011 if (HAVE_save_stack_nonlocal)
1012 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1014 #ifdef STACK_SAVEAREA_MODE
1015 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1019 = gen_rtx_MEM (sa_mode,
1022 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1026 emit_insn (gen_setjmp ());
1029 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1032 /* Expand a call to __builtin_prefetch. For a target that does not support
1033 data prefetch, evaluate the memory address argument in case it has side
1037 expand_builtin_prefetch (tree exp)
1039 tree arg0, arg1, arg2;
1043 if (!validate_arglist (exp, POINTER_TYPE, 0))
1046 arg0 = CALL_EXPR_ARG (exp, 0);
1048 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1049 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1051 nargs = call_expr_nargs (exp);
1053 arg1 = CALL_EXPR_ARG (exp, 1);
1055 arg1 = integer_zero_node;
1057 arg2 = CALL_EXPR_ARG (exp, 2);
1059 arg2 = build_int_cst (NULL_TREE, 3);
1061 /* Argument 0 is an address. */
1062 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1064 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1065 if (TREE_CODE (arg1) != INTEGER_CST)
1067 error ("second argument to %<__builtin_prefetch%> must be a constant");
1068 arg1 = integer_zero_node;
1070 op1 = expand_normal (arg1);
1071 /* Argument 1 must be either zero or one. */
1072 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1074 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1079 /* Argument 2 (locality) must be a compile-time constant int. */
1080 if (TREE_CODE (arg2) != INTEGER_CST)
1082 error ("third argument to %<__builtin_prefetch%> must be a constant");
1083 arg2 = integer_zero_node;
1085 op2 = expand_normal (arg2);
1086 /* Argument 2 must be 0, 1, 2, or 3. */
1087 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1089 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1093 #ifdef HAVE_prefetch
1096 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1098 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1099 || (GET_MODE (op0) != Pmode))
1101 op0 = convert_memory_address (Pmode, op0);
1102 op0 = force_reg (Pmode, op0);
1104 emit_insn (gen_prefetch (op0, op1, op2));
1108 /* Don't do anything with direct references to volatile memory, but
1109 generate code to handle other side effects. */
1110 if (!MEM_P (op0) && side_effects_p (op0))
1114 /* Get a MEM rtx for expression EXP which is the address of an operand
1115 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1116 the maximum length of the block of memory that might be accessed or
1120 get_memory_rtx (tree exp, tree len)
1122 tree orig_exp = exp;
1126 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1127 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1128 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1129 exp = TREE_OPERAND (exp, 0);
1131 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1132 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1134 /* Get an expression we can use to find the attributes to assign to MEM.
1135 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1136 we can. First remove any nops. */
1137 while (CONVERT_EXPR_P (exp)
1138 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1139 exp = TREE_OPERAND (exp, 0);
1142 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1143 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1144 && host_integerp (TREE_OPERAND (exp, 1), 0)
1145 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1146 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1147 else if (TREE_CODE (exp) == ADDR_EXPR)
1148 exp = TREE_OPERAND (exp, 0);
1149 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1150 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1154 /* Honor attributes derived from exp, except for the alias set
1155 (as builtin stringops may alias with anything) and the size
1156 (as stringops may access multiple array elements). */
1159 set_mem_attributes (mem, exp, 0);
1162 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1164 /* Allow the string and memory builtins to overflow from one
1165 field into another, see http://gcc.gnu.org/PR23561.
1166 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1167 memory accessed by the string or memory builtin will fit
1168 within the field. */
1169 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1171 tree mem_expr = MEM_EXPR (mem);
1172 HOST_WIDE_INT offset = -1, length = -1;
1175 while (TREE_CODE (inner) == ARRAY_REF
1176 || CONVERT_EXPR_P (inner)
1177 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1178 || TREE_CODE (inner) == SAVE_EXPR)
1179 inner = TREE_OPERAND (inner, 0);
1181 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1183 if (MEM_OFFSET (mem)
1184 && CONST_INT_P (MEM_OFFSET (mem)))
1185 offset = INTVAL (MEM_OFFSET (mem));
1187 if (offset >= 0 && len && host_integerp (len, 0))
1188 length = tree_low_cst (len, 0);
1190 while (TREE_CODE (inner) == COMPONENT_REF)
1192 tree field = TREE_OPERAND (inner, 1);
1193 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1194 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1196 /* Bitfields are generally not byte-addressable. */
1197 gcc_assert (!DECL_BIT_FIELD (field)
1198 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1199 % BITS_PER_UNIT) == 0
1200 && host_integerp (DECL_SIZE (field), 0)
1201 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1202 % BITS_PER_UNIT) == 0));
1204 /* If we can prove that the memory starting at XEXP (mem, 0) and
1205 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1206 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1207 fields without DECL_SIZE_UNIT like flexible array members. */
1209 && DECL_SIZE_UNIT (field)
1210 && host_integerp (DECL_SIZE_UNIT (field), 0))
1213 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1216 && offset + length <= size)
1221 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1222 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1223 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1231 mem_expr = TREE_OPERAND (mem_expr, 0);
1232 inner = TREE_OPERAND (inner, 0);
1235 if (mem_expr == NULL)
1237 if (mem_expr != MEM_EXPR (mem))
1239 set_mem_expr (mem, mem_expr);
1240 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1243 set_mem_alias_set (mem, 0);
1244 set_mem_size (mem, NULL_RTX);
1250 /* Built-in functions to perform an untyped call and return. */
1252 /* For each register that may be used for calling a function, this
1253 gives a mode used to copy the register's value. VOIDmode indicates
1254 the register is not used for calling a function. If the machine
1255 has register windows, this gives only the outbound registers.
1256 INCOMING_REGNO gives the corresponding inbound register. */
1257 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1259 /* For each register that may be used for returning values, this gives
1260 a mode used to copy the register's value. VOIDmode indicates the
1261 register is not used for returning values. If the machine has
1262 register windows, this gives only the outbound registers.
1263 INCOMING_REGNO gives the corresponding inbound register. */
1264 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1266 /* Return the size required for the block returned by __builtin_apply_args,
1267 and initialize apply_args_mode. */
1270 apply_args_size (void)
1272 static int size = -1;
1275 enum machine_mode mode;
1277 /* The values computed by this function never change. */
1280 /* The first value is the incoming arg-pointer. */
1281 size = GET_MODE_SIZE (Pmode);
1283 /* The second value is the structure value address unless this is
1284 passed as an "invisible" first argument. */
1285 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1286 size += GET_MODE_SIZE (Pmode);
1288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1289 if (FUNCTION_ARG_REGNO_P (regno))
1291 mode = reg_raw_mode[regno];
1293 gcc_assert (mode != VOIDmode);
1295 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1296 if (size % align != 0)
1297 size = CEIL (size, align) * align;
1298 size += GET_MODE_SIZE (mode);
1299 apply_args_mode[regno] = mode;
1303 apply_args_mode[regno] = VOIDmode;
1309 /* Return the size required for the block returned by __builtin_apply,
1310 and initialize apply_result_mode. */
1313 apply_result_size (void)
1315 static int size = -1;
1317 enum machine_mode mode;
1319 /* The values computed by this function never change. */
1324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325 if (FUNCTION_VALUE_REGNO_P (regno))
1327 mode = reg_raw_mode[regno];
1329 gcc_assert (mode != VOIDmode);
1331 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1332 if (size % align != 0)
1333 size = CEIL (size, align) * align;
1334 size += GET_MODE_SIZE (mode);
1335 apply_result_mode[regno] = mode;
1338 apply_result_mode[regno] = VOIDmode;
1340 /* Allow targets that use untyped_call and untyped_return to override
1341 the size so that machine-specific information can be stored here. */
1342 #ifdef APPLY_RESULT_SIZE
1343 size = APPLY_RESULT_SIZE;
1349 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1350 /* Create a vector describing the result block RESULT. If SAVEP is true,
1351 the result block is used to save the values; otherwise it is used to
1352 restore the values. */
1355 result_vector (int savep, rtx result)
1357 int regno, size, align, nelts;
1358 enum machine_mode mode;
1360 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1363 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1364 if ((mode = apply_result_mode[regno]) != VOIDmode)
1366 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1367 if (size % align != 0)
1368 size = CEIL (size, align) * align;
1369 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1370 mem = adjust_address (result, mode, size);
1371 savevec[nelts++] = (savep
1372 ? gen_rtx_SET (VOIDmode, mem, reg)
1373 : gen_rtx_SET (VOIDmode, reg, mem));
1374 size += GET_MODE_SIZE (mode);
1376 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1378 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1380 /* Save the state required to perform an untyped call with the same
1381 arguments as were passed to the current function. */
1384 expand_builtin_apply_args_1 (void)
1387 int size, align, regno;
1388 enum machine_mode mode;
1389 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1391 /* Create a block where the arg-pointer, structure value address,
1392 and argument registers can be saved. */
1393 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1395 /* Walk past the arg-pointer and structure value address. */
1396 size = GET_MODE_SIZE (Pmode);
1397 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1398 size += GET_MODE_SIZE (Pmode);
1400 /* Save each register used in calling a function to the block. */
1401 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1402 if ((mode = apply_args_mode[regno]) != VOIDmode)
1404 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1405 if (size % align != 0)
1406 size = CEIL (size, align) * align;
1408 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1410 emit_move_insn (adjust_address (registers, mode, size), tem);
1411 size += GET_MODE_SIZE (mode);
1414 /* Save the arg pointer to the block. */
1415 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1416 #ifdef STACK_GROWS_DOWNWARD
1417 /* We need the pointer as the caller actually passed them to us, not
1418 as we might have pretended they were passed. Make sure it's a valid
1419 operand, as emit_move_insn isn't expected to handle a PLUS. */
1421 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1424 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1426 size = GET_MODE_SIZE (Pmode);
1428 /* Save the structure value address unless this is passed as an
1429 "invisible" first argument. */
1430 if (struct_incoming_value)
1432 emit_move_insn (adjust_address (registers, Pmode, size),
1433 copy_to_reg (struct_incoming_value));
1434 size += GET_MODE_SIZE (Pmode);
1437 /* Return the address of the block. */
1438 return copy_addr_to_reg (XEXP (registers, 0));
1441 /* __builtin_apply_args returns block of memory allocated on
1442 the stack into which is stored the arg pointer, structure
1443 value address, static chain, and all the registers that might
1444 possibly be used in performing a function call. The code is
1445 moved to the start of the function so the incoming values are
1449 expand_builtin_apply_args (void)
1451 /* Don't do __builtin_apply_args more than once in a function.
1452 Save the result of the first call and reuse it. */
1453 if (apply_args_value != 0)
1454 return apply_args_value;
1456 /* When this function is called, it means that registers must be
1457 saved on entry to this function. So we migrate the
1458 call to the first insn of this function. */
1463 temp = expand_builtin_apply_args_1 ();
1467 apply_args_value = temp;
1469 /* Put the insns after the NOTE that starts the function.
1470 If this is inside a start_sequence, make the outer-level insn
1471 chain current, so the code is placed at the start of the
1472 function. If internal_arg_pointer is a non-virtual pseudo,
1473 it needs to be placed after the function that initializes
1475 push_topmost_sequence ();
1476 if (REG_P (crtl->args.internal_arg_pointer)
1477 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1478 emit_insn_before (seq, parm_birth_insn);
1480 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1481 pop_topmost_sequence ();
1486 /* Perform an untyped call and save the state required to perform an
1487 untyped return of whatever value was returned by the given function. */
1490 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1492 int size, align, regno;
1493 enum machine_mode mode;
1494 rtx incoming_args, result, reg, dest, src, call_insn;
1495 rtx old_stack_level = 0;
1496 rtx call_fusage = 0;
1497 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1499 arguments = convert_memory_address (Pmode, arguments);
1501 /* Create a block where the return registers can be saved. */
1502 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1504 /* Fetch the arg pointer from the ARGUMENTS block. */
1505 incoming_args = gen_reg_rtx (Pmode);
1506 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1507 #ifndef STACK_GROWS_DOWNWARD
1508 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1509 incoming_args, 0, OPTAB_LIB_WIDEN);
1512 /* Push a new argument block and copy the arguments. Do not allow
1513 the (potential) memcpy call below to interfere with our stack
1515 do_pending_stack_adjust ();
1518 /* Save the stack with nonlocal if available. */
1519 #ifdef HAVE_save_stack_nonlocal
1520 if (HAVE_save_stack_nonlocal)
1521 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1524 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1526 /* Allocate a block of memory onto the stack and copy the memory
1527 arguments to the outgoing arguments address. */
1528 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1530 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1531 may have already set current_function_calls_alloca to true.
1532 current_function_calls_alloca won't be set if argsize is zero,
1533 so we have to guarantee need_drap is true here. */
1534 if (SUPPORTS_STACK_ALIGNMENT)
1535 crtl->need_drap = true;
1537 dest = virtual_outgoing_args_rtx;
1538 #ifndef STACK_GROWS_DOWNWARD
1539 if (CONST_INT_P (argsize))
1540 dest = plus_constant (dest, -INTVAL (argsize));
1542 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1544 dest = gen_rtx_MEM (BLKmode, dest);
1545 set_mem_align (dest, PARM_BOUNDARY);
1546 src = gen_rtx_MEM (BLKmode, incoming_args);
1547 set_mem_align (src, PARM_BOUNDARY);
1548 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1550 /* Refer to the argument block. */
1552 arguments = gen_rtx_MEM (BLKmode, arguments);
1553 set_mem_align (arguments, PARM_BOUNDARY);
1555 /* Walk past the arg-pointer and structure value address. */
1556 size = GET_MODE_SIZE (Pmode);
1558 size += GET_MODE_SIZE (Pmode);
1560 /* Restore each of the registers previously saved. Make USE insns
1561 for each of these registers for use in making the call. */
1562 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1563 if ((mode = apply_args_mode[regno]) != VOIDmode)
1565 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1566 if (size % align != 0)
1567 size = CEIL (size, align) * align;
1568 reg = gen_rtx_REG (mode, regno);
1569 emit_move_insn (reg, adjust_address (arguments, mode, size));
1570 use_reg (&call_fusage, reg);
1571 size += GET_MODE_SIZE (mode);
1574 /* Restore the structure value address unless this is passed as an
1575 "invisible" first argument. */
1576 size = GET_MODE_SIZE (Pmode);
1579 rtx value = gen_reg_rtx (Pmode);
1580 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1581 emit_move_insn (struct_value, value);
1582 if (REG_P (struct_value))
1583 use_reg (&call_fusage, struct_value);
1584 size += GET_MODE_SIZE (Pmode);
1587 /* All arguments and registers used for the call are set up by now! */
1588 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1590 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1591 and we don't want to load it into a register as an optimization,
1592 because prepare_call_address already did it if it should be done. */
1593 if (GET_CODE (function) != SYMBOL_REF)
1594 function = memory_address (FUNCTION_MODE, function);
1596 /* Generate the actual call instruction and save the return value. */
1597 #ifdef HAVE_untyped_call
1598 if (HAVE_untyped_call)
1599 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1600 result, result_vector (1, result)));
1603 #ifdef HAVE_call_value
1604 if (HAVE_call_value)
1608 /* Locate the unique return register. It is not possible to
1609 express a call that sets more than one return register using
1610 call_value; use untyped_call for that. In fact, untyped_call
1611 only needs to save the return registers in the given block. */
1612 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1613 if ((mode = apply_result_mode[regno]) != VOIDmode)
1615 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1617 valreg = gen_rtx_REG (mode, regno);
1620 emit_call_insn (GEN_CALL_VALUE (valreg,
1621 gen_rtx_MEM (FUNCTION_MODE, function),
1622 const0_rtx, NULL_RTX, const0_rtx));
1624 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1630 /* Find the CALL insn we just emitted, and attach the register usage
1632 call_insn = last_call_insn ();
1633 add_function_usage_to (call_insn, call_fusage);
1635 /* Restore the stack. */
1636 #ifdef HAVE_save_stack_nonlocal
1637 if (HAVE_save_stack_nonlocal)
1638 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1641 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1645 /* Return the address of the result block. */
1646 result = copy_addr_to_reg (XEXP (result, 0));
1647 return convert_memory_address (ptr_mode, result);
1650 /* Perform an untyped return. */
1653 expand_builtin_return (rtx result)
1655 int size, align, regno;
1656 enum machine_mode mode;
1658 rtx call_fusage = 0;
1660 result = convert_memory_address (Pmode, result);
1662 apply_result_size ();
1663 result = gen_rtx_MEM (BLKmode, result);
1665 #ifdef HAVE_untyped_return
1666 if (HAVE_untyped_return)
1668 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1674 /* Restore the return value and note that each value is used. */
1676 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1677 if ((mode = apply_result_mode[regno]) != VOIDmode)
1679 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1680 if (size % align != 0)
1681 size = CEIL (size, align) * align;
1682 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1683 emit_move_insn (reg, adjust_address (result, mode, size));
1685 push_to_sequence (call_fusage);
1687 call_fusage = get_insns ();
1689 size += GET_MODE_SIZE (mode);
1692 /* Put the USE insns before the return. */
1693 emit_insn (call_fusage);
1695 /* Return whatever values was restored by jumping directly to the end
1697 expand_naked_return ();
1700 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1702 static enum type_class
1703 type_to_class (tree type)
1705 switch (TREE_CODE (type))
1707 case VOID_TYPE: return void_type_class;
1708 case INTEGER_TYPE: return integer_type_class;
1709 case ENUMERAL_TYPE: return enumeral_type_class;
1710 case BOOLEAN_TYPE: return boolean_type_class;
1711 case POINTER_TYPE: return pointer_type_class;
1712 case REFERENCE_TYPE: return reference_type_class;
1713 case OFFSET_TYPE: return offset_type_class;
1714 case REAL_TYPE: return real_type_class;
1715 case COMPLEX_TYPE: return complex_type_class;
1716 case FUNCTION_TYPE: return function_type_class;
1717 case METHOD_TYPE: return method_type_class;
1718 case RECORD_TYPE: return record_type_class;
1720 case QUAL_UNION_TYPE: return union_type_class;
1721 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1722 ? string_type_class : array_type_class);
1723 case LANG_TYPE: return lang_type_class;
1724 default: return no_type_class;
1728 /* Expand a call EXP to __builtin_classify_type. */
1731 expand_builtin_classify_type (tree exp)
1733 if (call_expr_nargs (exp))
1734 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1735 return GEN_INT (no_type_class);
1738 /* This helper macro, meant to be used in mathfn_built_in below,
1739 determines which among a set of three builtin math functions is
1740 appropriate for a given type mode. The `F' and `L' cases are
1741 automatically generated from the `double' case. */
1742 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1743 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1744 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1745 fcodel = BUILT_IN_MATHFN##L ; break;
1746 /* Similar to above, but appends _R after any F/L suffix. */
1747 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1748 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1749 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1750 fcodel = BUILT_IN_MATHFN##L_R ; break;
1752 /* Return mathematic function equivalent to FN but operating directly
1753 on TYPE, if available. If IMPLICIT is true find the function in
1754 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1755 can't do the conversion, return zero. */
1758 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1760 tree const *const fn_arr
1761 = implicit ? implicit_built_in_decls : built_in_decls;
1762 enum built_in_function fcode, fcodef, fcodel;
1766 CASE_MATHFN (BUILT_IN_ACOS)
1767 CASE_MATHFN (BUILT_IN_ACOSH)
1768 CASE_MATHFN (BUILT_IN_ASIN)
1769 CASE_MATHFN (BUILT_IN_ASINH)
1770 CASE_MATHFN (BUILT_IN_ATAN)
1771 CASE_MATHFN (BUILT_IN_ATAN2)
1772 CASE_MATHFN (BUILT_IN_ATANH)
1773 CASE_MATHFN (BUILT_IN_CBRT)
1774 CASE_MATHFN (BUILT_IN_CEIL)
1775 CASE_MATHFN (BUILT_IN_CEXPI)
1776 CASE_MATHFN (BUILT_IN_COPYSIGN)
1777 CASE_MATHFN (BUILT_IN_COS)
1778 CASE_MATHFN (BUILT_IN_COSH)
1779 CASE_MATHFN (BUILT_IN_DREM)
1780 CASE_MATHFN (BUILT_IN_ERF)
1781 CASE_MATHFN (BUILT_IN_ERFC)
1782 CASE_MATHFN (BUILT_IN_EXP)
1783 CASE_MATHFN (BUILT_IN_EXP10)
1784 CASE_MATHFN (BUILT_IN_EXP2)
1785 CASE_MATHFN (BUILT_IN_EXPM1)
1786 CASE_MATHFN (BUILT_IN_FABS)
1787 CASE_MATHFN (BUILT_IN_FDIM)
1788 CASE_MATHFN (BUILT_IN_FLOOR)
1789 CASE_MATHFN (BUILT_IN_FMA)
1790 CASE_MATHFN (BUILT_IN_FMAX)
1791 CASE_MATHFN (BUILT_IN_FMIN)
1792 CASE_MATHFN (BUILT_IN_FMOD)
1793 CASE_MATHFN (BUILT_IN_FREXP)
1794 CASE_MATHFN (BUILT_IN_GAMMA)
1795 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1796 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1797 CASE_MATHFN (BUILT_IN_HYPOT)
1798 CASE_MATHFN (BUILT_IN_ILOGB)
1799 CASE_MATHFN (BUILT_IN_INF)
1800 CASE_MATHFN (BUILT_IN_ISINF)
1801 CASE_MATHFN (BUILT_IN_J0)
1802 CASE_MATHFN (BUILT_IN_J1)
1803 CASE_MATHFN (BUILT_IN_JN)
1804 CASE_MATHFN (BUILT_IN_LCEIL)
1805 CASE_MATHFN (BUILT_IN_LDEXP)
1806 CASE_MATHFN (BUILT_IN_LFLOOR)
1807 CASE_MATHFN (BUILT_IN_LGAMMA)
1808 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1809 CASE_MATHFN (BUILT_IN_LLCEIL)
1810 CASE_MATHFN (BUILT_IN_LLFLOOR)
1811 CASE_MATHFN (BUILT_IN_LLRINT)
1812 CASE_MATHFN (BUILT_IN_LLROUND)
1813 CASE_MATHFN (BUILT_IN_LOG)
1814 CASE_MATHFN (BUILT_IN_LOG10)
1815 CASE_MATHFN (BUILT_IN_LOG1P)
1816 CASE_MATHFN (BUILT_IN_LOG2)
1817 CASE_MATHFN (BUILT_IN_LOGB)
1818 CASE_MATHFN (BUILT_IN_LRINT)
1819 CASE_MATHFN (BUILT_IN_LROUND)
1820 CASE_MATHFN (BUILT_IN_MODF)
1821 CASE_MATHFN (BUILT_IN_NAN)
1822 CASE_MATHFN (BUILT_IN_NANS)
1823 CASE_MATHFN (BUILT_IN_NEARBYINT)
1824 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1825 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1826 CASE_MATHFN (BUILT_IN_POW)
1827 CASE_MATHFN (BUILT_IN_POWI)
1828 CASE_MATHFN (BUILT_IN_POW10)
1829 CASE_MATHFN (BUILT_IN_REMAINDER)
1830 CASE_MATHFN (BUILT_IN_REMQUO)
1831 CASE_MATHFN (BUILT_IN_RINT)
1832 CASE_MATHFN (BUILT_IN_ROUND)
1833 CASE_MATHFN (BUILT_IN_SCALB)
1834 CASE_MATHFN (BUILT_IN_SCALBLN)
1835 CASE_MATHFN (BUILT_IN_SCALBN)
1836 CASE_MATHFN (BUILT_IN_SIGNBIT)
1837 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1838 CASE_MATHFN (BUILT_IN_SIN)
1839 CASE_MATHFN (BUILT_IN_SINCOS)
1840 CASE_MATHFN (BUILT_IN_SINH)
1841 CASE_MATHFN (BUILT_IN_SQRT)
1842 CASE_MATHFN (BUILT_IN_TAN)
1843 CASE_MATHFN (BUILT_IN_TANH)
1844 CASE_MATHFN (BUILT_IN_TGAMMA)
1845 CASE_MATHFN (BUILT_IN_TRUNC)
1846 CASE_MATHFN (BUILT_IN_Y0)
1847 CASE_MATHFN (BUILT_IN_Y1)
1848 CASE_MATHFN (BUILT_IN_YN)
1854 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1855 return fn_arr[fcode];
1856 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1857 return fn_arr[fcodef];
1858 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1859 return fn_arr[fcodel];
1864 /* Like mathfn_built_in_1(), but always use the implicit array. */
1867 mathfn_built_in (tree type, enum built_in_function fn)
1869 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1872 /* If errno must be maintained, expand the RTL to check if the result,
1873 TARGET, of a built-in function call, EXP, is NaN, and if so set
1877 expand_errno_check (tree exp, rtx target)
1879 rtx lab = gen_label_rtx ();
1881 /* Test the result; if it is NaN, set errno=EDOM because
1882 the argument was not in the domain. */
1883 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1884 NULL_RTX, NULL_RTX, lab,
1885 /* The jump is very likely. */
1886 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1889 /* If this built-in doesn't throw an exception, set errno directly. */
1890 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1892 #ifdef GEN_ERRNO_RTX
1893 rtx errno_rtx = GEN_ERRNO_RTX;
1896 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1898 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1904 /* Make sure the library call isn't expanded as a tail call. */
1905 CALL_EXPR_TAILCALL (exp) = 0;
1907 /* We can't set errno=EDOM directly; let the library call do it.
1908 Pop the arguments right away in case the call gets deleted. */
1910 expand_call (exp, target, 0);
1915 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1916 Return NULL_RTX if a normal call should be emitted rather than expanding
1917 the function in-line. EXP is the expression that is a call to the builtin
1918 function; if convenient, the result should be placed in TARGET.
1919 SUBTARGET may be used as the target for computing one of EXP's operands. */
1922 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1924 optab builtin_optab;
1926 tree fndecl = get_callee_fndecl (exp);
1927 enum machine_mode mode;
1928 bool errno_set = false;
1931 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1934 arg = CALL_EXPR_ARG (exp, 0);
1936 switch (DECL_FUNCTION_CODE (fndecl))
1938 CASE_FLT_FN (BUILT_IN_SQRT):
1939 errno_set = ! tree_expr_nonnegative_p (arg);
1940 builtin_optab = sqrt_optab;
1942 CASE_FLT_FN (BUILT_IN_EXP):
1943 errno_set = true; builtin_optab = exp_optab; break;
1944 CASE_FLT_FN (BUILT_IN_EXP10):
1945 CASE_FLT_FN (BUILT_IN_POW10):
1946 errno_set = true; builtin_optab = exp10_optab; break;
1947 CASE_FLT_FN (BUILT_IN_EXP2):
1948 errno_set = true; builtin_optab = exp2_optab; break;
1949 CASE_FLT_FN (BUILT_IN_EXPM1):
1950 errno_set = true; builtin_optab = expm1_optab; break;
1951 CASE_FLT_FN (BUILT_IN_LOGB):
1952 errno_set = true; builtin_optab = logb_optab; break;
1953 CASE_FLT_FN (BUILT_IN_LOG):
1954 errno_set = true; builtin_optab = log_optab; break;
1955 CASE_FLT_FN (BUILT_IN_LOG10):
1956 errno_set = true; builtin_optab = log10_optab; break;
1957 CASE_FLT_FN (BUILT_IN_LOG2):
1958 errno_set = true; builtin_optab = log2_optab; break;
1959 CASE_FLT_FN (BUILT_IN_LOG1P):
1960 errno_set = true; builtin_optab = log1p_optab; break;
1961 CASE_FLT_FN (BUILT_IN_ASIN):
1962 builtin_optab = asin_optab; break;
1963 CASE_FLT_FN (BUILT_IN_ACOS):
1964 builtin_optab = acos_optab; break;
1965 CASE_FLT_FN (BUILT_IN_TAN):
1966 builtin_optab = tan_optab; break;
1967 CASE_FLT_FN (BUILT_IN_ATAN):
1968 builtin_optab = atan_optab; break;
1969 CASE_FLT_FN (BUILT_IN_FLOOR):
1970 builtin_optab = floor_optab; break;
1971 CASE_FLT_FN (BUILT_IN_CEIL):
1972 builtin_optab = ceil_optab; break;
1973 CASE_FLT_FN (BUILT_IN_TRUNC):
1974 builtin_optab = btrunc_optab; break;
1975 CASE_FLT_FN (BUILT_IN_ROUND):
1976 builtin_optab = round_optab; break;
1977 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1978 builtin_optab = nearbyint_optab;
1979 if (flag_trapping_math)
1981 /* Else fallthrough and expand as rint. */
1982 CASE_FLT_FN (BUILT_IN_RINT):
1983 builtin_optab = rint_optab; break;
1984 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1985 builtin_optab = significand_optab; break;
1990 /* Make a suitable register to place result in. */
1991 mode = TYPE_MODE (TREE_TYPE (exp));
1993 if (! flag_errno_math || ! HONOR_NANS (mode))
1996 /* Before working hard, check whether the instruction is available. */
1997 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1999 target = gen_reg_rtx (mode);
2001 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2002 need to expand the argument again. This way, we will not perform
2003 side-effects more the once. */
2004 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2006 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2010 /* Compute into TARGET.
2011 Set TARGET to wherever the result comes back. */
2012 target = expand_unop (mode, builtin_optab, op0, target, 0);
2017 expand_errno_check (exp, target);
2019 /* Output the entire sequence. */
2020 insns = get_insns ();
2026 /* If we were unable to expand via the builtin, stop the sequence
2027 (without outputting the insns) and call to the library function
2028 with the stabilized argument list. */
2032 return expand_call (exp, target, target == const0_rtx);
2035 /* Expand a call to the builtin binary math functions (pow and atan2).
2036 Return NULL_RTX if a normal call should be emitted rather than expanding the
2037 function in-line. EXP is the expression that is a call to the builtin
2038 function; if convenient, the result should be placed in TARGET.
2039 SUBTARGET may be used as the target for computing one of EXP's
2043 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2045 optab builtin_optab;
2046 rtx op0, op1, insns;
2047 int op1_type = REAL_TYPE;
2048 tree fndecl = get_callee_fndecl (exp);
2050 enum machine_mode mode;
2051 bool errno_set = true;
2053 switch (DECL_FUNCTION_CODE (fndecl))
2055 CASE_FLT_FN (BUILT_IN_SCALBN):
2056 CASE_FLT_FN (BUILT_IN_SCALBLN):
2057 CASE_FLT_FN (BUILT_IN_LDEXP):
2058 op1_type = INTEGER_TYPE;
2063 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2066 arg0 = CALL_EXPR_ARG (exp, 0);
2067 arg1 = CALL_EXPR_ARG (exp, 1);
2069 switch (DECL_FUNCTION_CODE (fndecl))
2071 CASE_FLT_FN (BUILT_IN_POW):
2072 builtin_optab = pow_optab; break;
2073 CASE_FLT_FN (BUILT_IN_ATAN2):
2074 builtin_optab = atan2_optab; break;
2075 CASE_FLT_FN (BUILT_IN_SCALB):
2076 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2078 builtin_optab = scalb_optab; break;
2079 CASE_FLT_FN (BUILT_IN_SCALBN):
2080 CASE_FLT_FN (BUILT_IN_SCALBLN):
2081 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2083 /* Fall through... */
2084 CASE_FLT_FN (BUILT_IN_LDEXP):
2085 builtin_optab = ldexp_optab; break;
2086 CASE_FLT_FN (BUILT_IN_FMOD):
2087 builtin_optab = fmod_optab; break;
2088 CASE_FLT_FN (BUILT_IN_REMAINDER):
2089 CASE_FLT_FN (BUILT_IN_DREM):
2090 builtin_optab = remainder_optab; break;
2095 /* Make a suitable register to place result in. */
2096 mode = TYPE_MODE (TREE_TYPE (exp));
2098 /* Before working hard, check whether the instruction is available. */
2099 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2102 target = gen_reg_rtx (mode);
2104 if (! flag_errno_math || ! HONOR_NANS (mode))
2107 /* Always stabilize the argument list. */
2108 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2109 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2111 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2112 op1 = expand_normal (arg1);
2116 /* Compute into TARGET.
2117 Set TARGET to wherever the result comes back. */
2118 target = expand_binop (mode, builtin_optab, op0, op1,
2119 target, 0, OPTAB_DIRECT);
2121 /* If we were unable to expand via the builtin, stop the sequence
2122 (without outputting the insns) and call to the library function
2123 with the stabilized argument list. */
2127 return expand_call (exp, target, target == const0_rtx);
2131 expand_errno_check (exp, target);
2133 /* Output the entire sequence. */
2134 insns = get_insns ();
2141 /* Expand a call to the builtin sin and cos math functions.
2142 Return NULL_RTX if a normal call should be emitted rather than expanding the
2143 function in-line. EXP is the expression that is a call to the builtin
2144 function; if convenient, the result should be placed in TARGET.
2145 SUBTARGET may be used as the target for computing one of EXP's
2149 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2151 optab builtin_optab;
2153 tree fndecl = get_callee_fndecl (exp);
2154 enum machine_mode mode;
2157 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2160 arg = CALL_EXPR_ARG (exp, 0);
2162 switch (DECL_FUNCTION_CODE (fndecl))
2164 CASE_FLT_FN (BUILT_IN_SIN):
2165 CASE_FLT_FN (BUILT_IN_COS):
2166 builtin_optab = sincos_optab; break;
2171 /* Make a suitable register to place result in. */
2172 mode = TYPE_MODE (TREE_TYPE (exp));
2174 /* Check if sincos insn is available, otherwise fallback
2175 to sin or cos insn. */
2176 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2177 switch (DECL_FUNCTION_CODE (fndecl))
2179 CASE_FLT_FN (BUILT_IN_SIN):
2180 builtin_optab = sin_optab; break;
2181 CASE_FLT_FN (BUILT_IN_COS):
2182 builtin_optab = cos_optab; break;
2187 /* Before working hard, check whether the instruction is available. */
2188 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2190 target = gen_reg_rtx (mode);
2192 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2193 need to expand the argument again. This way, we will not perform
2194 side-effects more the once. */
2195 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2197 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2201 /* Compute into TARGET.
2202 Set TARGET to wherever the result comes back. */
2203 if (builtin_optab == sincos_optab)
2207 switch (DECL_FUNCTION_CODE (fndecl))
2209 CASE_FLT_FN (BUILT_IN_SIN):
2210 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2212 CASE_FLT_FN (BUILT_IN_COS):
2213 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2218 gcc_assert (result);
2222 target = expand_unop (mode, builtin_optab, op0, target, 0);
2227 /* Output the entire sequence. */
2228 insns = get_insns ();
2234 /* If we were unable to expand via the builtin, stop the sequence
2235 (without outputting the insns) and call to the library function
2236 with the stabilized argument list. */
2240 target = expand_call (exp, target, target == const0_rtx);
2245 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2246 return an RTL instruction code that implements the functionality.
2247 If that isn't possible or available return CODE_FOR_nothing. */
2249 static enum insn_code
2250 interclass_mathfn_icode (tree arg, tree fndecl)
2252 bool errno_set = false;
2253 optab builtin_optab = 0;
2254 enum machine_mode mode;
2256 switch (DECL_FUNCTION_CODE (fndecl))
2258 CASE_FLT_FN (BUILT_IN_ILOGB):
2259 errno_set = true; builtin_optab = ilogb_optab; break;
2260 CASE_FLT_FN (BUILT_IN_ISINF):
2261 builtin_optab = isinf_optab; break;
2262 case BUILT_IN_ISNORMAL:
2263 case BUILT_IN_ISFINITE:
2264 CASE_FLT_FN (BUILT_IN_FINITE):
2265 case BUILT_IN_FINITED32:
2266 case BUILT_IN_FINITED64:
2267 case BUILT_IN_FINITED128:
2268 case BUILT_IN_ISINFD32:
2269 case BUILT_IN_ISINFD64:
2270 case BUILT_IN_ISINFD128:
2271 /* These builtins have no optabs (yet). */
2277 /* There's no easy way to detect the case we need to set EDOM. */
2278 if (flag_errno_math && errno_set)
2279 return CODE_FOR_nothing;
2281 /* Optab mode depends on the mode of the input argument. */
2282 mode = TYPE_MODE (TREE_TYPE (arg));
2285 return optab_handler (builtin_optab, mode)->insn_code;
2286 return CODE_FOR_nothing;
2289 /* Expand a call to one of the builtin math functions that operate on
2290 floating point argument and output an integer result (ilogb, isinf,
2292 Return 0 if a normal call should be emitted rather than expanding the
2293 function in-line. EXP is the expression that is a call to the builtin
2294 function; if convenient, the result should be placed in TARGET.
2295 SUBTARGET may be used as the target for computing one of EXP's operands. */
2298 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2300 enum insn_code icode = CODE_FOR_nothing;
2302 tree fndecl = get_callee_fndecl (exp);
2303 enum machine_mode mode;
2306 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2309 arg = CALL_EXPR_ARG (exp, 0);
2310 icode = interclass_mathfn_icode (arg, fndecl);
2311 mode = TYPE_MODE (TREE_TYPE (arg));
2313 if (icode != CODE_FOR_nothing)
2315 /* Make a suitable register to place result in. */
2317 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2318 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2320 gcc_assert (insn_data[icode].operand[0].predicate
2321 (target, GET_MODE (target)));
2323 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2324 need to expand the argument again. This way, we will not perform
2325 side-effects more the once. */
2326 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2328 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2330 if (mode != GET_MODE (op0))
2331 op0 = convert_to_mode (mode, op0, 0);
2333 /* Compute into TARGET.
2334 Set TARGET to wherever the result comes back. */
2335 emit_unop_insn (icode, target, op0, UNKNOWN);
2342 /* Expand a call to the builtin sincos math function.
2343 Return NULL_RTX if a normal call should be emitted rather than expanding the
2344 function in-line. EXP is the expression that is a call to the builtin
2348 expand_builtin_sincos (tree exp)
2350 rtx op0, op1, op2, target1, target2;
2351 enum machine_mode mode;
2352 tree arg, sinp, cosp;
2354 location_t loc = EXPR_LOCATION (exp);
2356 if (!validate_arglist (exp, REAL_TYPE,
2357 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2360 arg = CALL_EXPR_ARG (exp, 0);
2361 sinp = CALL_EXPR_ARG (exp, 1);
2362 cosp = CALL_EXPR_ARG (exp, 2);
2364 /* Make a suitable register to place result in. */
2365 mode = TYPE_MODE (TREE_TYPE (arg));
2367 /* Check if sincos insn is available, otherwise emit the call. */
2368 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2371 target1 = gen_reg_rtx (mode);
2372 target2 = gen_reg_rtx (mode);
2374 op0 = expand_normal (arg);
2375 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2376 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2378 /* Compute into target1 and target2.
2379 Set TARGET to wherever the result comes back. */
2380 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2381 gcc_assert (result);
2383 /* Move target1 and target2 to the memory locations indicated
2385 emit_move_insn (op1, target1);
2386 emit_move_insn (op2, target2);
2391 /* Expand a call to the internal cexpi builtin to the sincos math function.
2392 EXP is the expression that is a call to the builtin function; if convenient,
2393 the result should be placed in TARGET. SUBTARGET may be used as the target
2394 for computing one of EXP's operands. */
2397 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2399 tree fndecl = get_callee_fndecl (exp);
2401 enum machine_mode mode;
2403 location_t loc = EXPR_LOCATION (exp);
2405 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2408 arg = CALL_EXPR_ARG (exp, 0);
2409 type = TREE_TYPE (arg);
2410 mode = TYPE_MODE (TREE_TYPE (arg));
2412 /* Try expanding via a sincos optab, fall back to emitting a libcall
2413 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2414 is only generated from sincos, cexp or if we have either of them. */
2415 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2417 op1 = gen_reg_rtx (mode);
2418 op2 = gen_reg_rtx (mode);
2420 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2422 /* Compute into op1 and op2. */
2423 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2425 else if (TARGET_HAS_SINCOS)
2427 tree call, fn = NULL_TREE;
2431 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2432 fn = built_in_decls[BUILT_IN_SINCOSF];
2433 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2434 fn = built_in_decls[BUILT_IN_SINCOS];
2435 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2436 fn = built_in_decls[BUILT_IN_SINCOSL];
2440 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2441 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2442 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2443 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2444 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2445 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2447 /* Make sure not to fold the sincos call again. */
2448 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2449 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2450 call, 3, arg, top1, top2));
2454 tree call, fn = NULL_TREE, narg;
2455 tree ctype = build_complex_type (type);
2457 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2458 fn = built_in_decls[BUILT_IN_CEXPF];
2459 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2460 fn = built_in_decls[BUILT_IN_CEXP];
2461 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2462 fn = built_in_decls[BUILT_IN_CEXPL];
2466 /* If we don't have a decl for cexp create one. This is the
2467 friendliest fallback if the user calls __builtin_cexpi
2468 without full target C99 function support. */
2469 if (fn == NULL_TREE)
2472 const char *name = NULL;
2474 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2476 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2478 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2481 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2482 fn = build_fn_decl (name, fntype);
2485 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2486 build_real (type, dconst0), arg);
2488 /* Make sure not to fold the cexp call again. */
2489 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2490 return expand_expr (build_call_nary (ctype, call, 1, narg),
2491 target, VOIDmode, EXPAND_NORMAL);
2494 /* Now build the proper return type. */
2495 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2496 make_tree (TREE_TYPE (arg), op2),
2497 make_tree (TREE_TYPE (arg), op1)),
2498 target, VOIDmode, EXPAND_NORMAL);
2501 /* Conveniently construct a function call expression. FNDECL names the
2502 function to be called, N is the number of arguments, and the "..."
2503 parameters are the argument expressions. Unlike build_call_exr
2504 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2507 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2510 tree fntype = TREE_TYPE (fndecl);
2511 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2514 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2516 SET_EXPR_LOCATION (fn, loc);
2519 #define build_call_nofold(...) \
2520 build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
2522 /* Expand a call to one of the builtin rounding functions gcc defines
2523 as an extension (lfloor and lceil). As these are gcc extensions we
2524 do not need to worry about setting errno to EDOM.
2525 If expanding via optab fails, lower expression to (int)(floor(x)).
2526 EXP is the expression that is a call to the builtin function;
2527 if convenient, the result should be placed in TARGET. */
2530 expand_builtin_int_roundingfn (tree exp, rtx target)
2532 convert_optab builtin_optab;
2533 rtx op0, insns, tmp;
2534 tree fndecl = get_callee_fndecl (exp);
2535 enum built_in_function fallback_fn;
2536 tree fallback_fndecl;
2537 enum machine_mode mode;
2540 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2543 arg = CALL_EXPR_ARG (exp, 0);
2545 switch (DECL_FUNCTION_CODE (fndecl))
2547 CASE_FLT_FN (BUILT_IN_LCEIL):
2548 CASE_FLT_FN (BUILT_IN_LLCEIL):
2549 builtin_optab = lceil_optab;
2550 fallback_fn = BUILT_IN_CEIL;
2553 CASE_FLT_FN (BUILT_IN_LFLOOR):
2554 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2555 builtin_optab = lfloor_optab;
2556 fallback_fn = BUILT_IN_FLOOR;
2563 /* Make a suitable register to place result in. */
2564 mode = TYPE_MODE (TREE_TYPE (exp));
2566 target = gen_reg_rtx (mode);
2568 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2569 need to expand the argument again. This way, we will not perform
2570 side-effects more the once. */
2571 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2573 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2577 /* Compute into TARGET. */
2578 if (expand_sfix_optab (target, op0, builtin_optab))
2580 /* Output the entire sequence. */
2581 insns = get_insns ();
2587 /* If we were unable to expand via the builtin, stop the sequence
2588 (without outputting the insns). */
2591 /* Fall back to floating point rounding optab. */
2592 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2594 /* For non-C99 targets we may end up without a fallback fndecl here
2595 if the user called __builtin_lfloor directly. In this case emit
2596 a call to the floor/ceil variants nevertheless. This should result
2597 in the best user experience for not full C99 targets. */
2598 if (fallback_fndecl == NULL_TREE)
2601 const char *name = NULL;
2603 switch (DECL_FUNCTION_CODE (fndecl))
2605 case BUILT_IN_LCEIL:
2606 case BUILT_IN_LLCEIL:
2609 case BUILT_IN_LCEILF:
2610 case BUILT_IN_LLCEILF:
2613 case BUILT_IN_LCEILL:
2614 case BUILT_IN_LLCEILL:
2617 case BUILT_IN_LFLOOR:
2618 case BUILT_IN_LLFLOOR:
2621 case BUILT_IN_LFLOORF:
2622 case BUILT_IN_LLFLOORF:
2625 case BUILT_IN_LFLOORL:
2626 case BUILT_IN_LLFLOORL:
2633 fntype = build_function_type_list (TREE_TYPE (arg),
2634 TREE_TYPE (arg), NULL_TREE);
2635 fallback_fndecl = build_fn_decl (name, fntype);
2638 exp = build_call_nofold (fallback_fndecl, 1, arg);
2640 tmp = expand_normal (exp);
2642 /* Truncate the result of floating point optab to integer
2643 via expand_fix (). */
2644 target = gen_reg_rtx (mode);
2645 expand_fix (target, tmp, 0);
2650 /* Expand a call to one of the builtin math functions doing integer
2652 Return 0 if a normal call should be emitted rather than expanding the
2653 function in-line. EXP is the expression that is a call to the builtin
2654 function; if convenient, the result should be placed in TARGET. */
2657 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2659 convert_optab builtin_optab;
2661 tree fndecl = get_callee_fndecl (exp);
2663 enum machine_mode mode;
2665 /* There's no easy way to detect the case we need to set EDOM. */
2666 if (flag_errno_math)
2669 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2672 arg = CALL_EXPR_ARG (exp, 0);
2674 switch (DECL_FUNCTION_CODE (fndecl))
2676 CASE_FLT_FN (BUILT_IN_LRINT):
2677 CASE_FLT_FN (BUILT_IN_LLRINT):
2678 builtin_optab = lrint_optab; break;
2679 CASE_FLT_FN (BUILT_IN_LROUND):
2680 CASE_FLT_FN (BUILT_IN_LLROUND):
2681 builtin_optab = lround_optab; break;
2686 /* Make a suitable register to place result in. */
2687 mode = TYPE_MODE (TREE_TYPE (exp));
2689 target = gen_reg_rtx (mode);
2691 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2692 need to expand the argument again. This way, we will not perform
2693 side-effects more the once. */
2694 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2696 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2700 if (expand_sfix_optab (target, op0, builtin_optab))
2702 /* Output the entire sequence. */
2703 insns = get_insns ();
2709 /* If we were unable to expand via the builtin, stop the sequence
2710 (without outputting the insns) and call to the library function
2711 with the stabilized argument list. */
2714 target = expand_call (exp, target, target == const0_rtx);
2719 /* To evaluate powi(x,n), the floating point value x raised to the
2720 constant integer exponent n, we use a hybrid algorithm that
2721 combines the "window method" with look-up tables. For an
2722 introduction to exponentiation algorithms and "addition chains",
2723 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2724 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2725 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2726 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2728 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2729 multiplications to inline before calling the system library's pow
2730 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2731 so this default never requires calling pow, powf or powl. */
2733 #ifndef POWI_MAX_MULTS
2734 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2737 /* The size of the "optimal power tree" lookup table. All
2738 exponents less than this value are simply looked up in the
2739 powi_table below. This threshold is also used to size the
2740 cache of pseudo registers that hold intermediate results. */
2741 #define POWI_TABLE_SIZE 256
2743 /* The size, in bits of the window, used in the "window method"
2744 exponentiation algorithm. This is equivalent to a radix of
2745 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2746 #define POWI_WINDOW_SIZE 3
2748 /* The following table is an efficient representation of an
2749 "optimal power tree". For each value, i, the corresponding
2750 value, j, in the table states than an optimal evaluation
2751 sequence for calculating pow(x,i) can be found by evaluating
2752 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2753 100 integers is given in Knuth's "Seminumerical algorithms". */
2755 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2757 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2758 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2759 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2760 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2761 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2762 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2763 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2764 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2765 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2766 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2767 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2768 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2769 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2770 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2771 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2772 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2773 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2774 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2775 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2776 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2777 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2778 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2779 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2780 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2781 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2782 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2783 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2784 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2785 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2786 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2787 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2788 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2792 /* Return the number of multiplications required to calculate
2793 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2794 subroutine of powi_cost. CACHE is an array indicating
2795 which exponents have already been calculated. */
2798 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2800 /* If we've already calculated this exponent, then this evaluation
2801 doesn't require any additional multiplications. */
2806 return powi_lookup_cost (n - powi_table[n], cache)
2807 + powi_lookup_cost (powi_table[n], cache) + 1;
2810 /* Return the number of multiplications required to calculate
2811 powi(x,n) for an arbitrary x, given the exponent N. This
2812 function needs to be kept in sync with expand_powi below. */
2815 powi_cost (HOST_WIDE_INT n)
2817 bool cache[POWI_TABLE_SIZE];
2818 unsigned HOST_WIDE_INT digit;
2819 unsigned HOST_WIDE_INT val;
2825 /* Ignore the reciprocal when calculating the cost. */
2826 val = (n < 0) ? -n : n;
2828 /* Initialize the exponent cache. */
2829 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2834 while (val >= POWI_TABLE_SIZE)
2838 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2839 result += powi_lookup_cost (digit, cache)
2840 + POWI_WINDOW_SIZE + 1;
2841 val >>= POWI_WINDOW_SIZE;
2850 return result + powi_lookup_cost (val, cache);
2853 /* Recursive subroutine of expand_powi. This function takes the array,
2854 CACHE, of already calculated exponents and an exponent N and returns
2855 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2858 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2860 unsigned HOST_WIDE_INT digit;
2864 if (n < POWI_TABLE_SIZE)
2869 target = gen_reg_rtx (mode);
2872 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2873 op1 = expand_powi_1 (mode, powi_table[n], cache);
2877 target = gen_reg_rtx (mode);
2878 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2879 op0 = expand_powi_1 (mode, n - digit, cache);
2880 op1 = expand_powi_1 (mode, digit, cache);
2884 target = gen_reg_rtx (mode);
2885 op0 = expand_powi_1 (mode, n >> 1, cache);
2889 result = expand_mult (mode, op0, op1, target, 0);
2890 if (result != target)
2891 emit_move_insn (target, result);
2895 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2896 floating point operand in mode MODE, and N is the exponent. This
2897 function needs to be kept in sync with powi_cost above. */
2900 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2902 rtx cache[POWI_TABLE_SIZE];
2906 return CONST1_RTX (mode);
2908 memset (cache, 0, sizeof (cache));
2911 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2913 /* If the original exponent was negative, reciprocate the result. */
2915 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2916 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2921 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2922 a normal call should be emitted rather than expanding the function
2923 in-line. EXP is the expression that is a call to the builtin
2924 function; if convenient, the result should be placed in TARGET. */
2927 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2931 tree type = TREE_TYPE (exp);
2932 REAL_VALUE_TYPE cint, c, c2;
2935 enum machine_mode mode = TYPE_MODE (type);
2937 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2940 arg0 = CALL_EXPR_ARG (exp, 0);
2941 arg1 = CALL_EXPR_ARG (exp, 1);
2943 if (TREE_CODE (arg1) != REAL_CST
2944 || TREE_OVERFLOW (arg1))
2945 return expand_builtin_mathfn_2 (exp, target, subtarget);
2947 /* Handle constant exponents. */
2949 /* For integer valued exponents we can expand to an optimal multiplication
2950 sequence using expand_powi. */
2951 c = TREE_REAL_CST (arg1);
2952 n = real_to_integer (&c);
2953 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2954 if (real_identical (&c, &cint)
2955 && ((n >= -1 && n <= 2)
2956 || (flag_unsafe_math_optimizations
2957 && optimize_insn_for_speed_p ()
2958 && powi_cost (n) <= POWI_MAX_MULTS)))
2960 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2963 op = force_reg (mode, op);
2964 op = expand_powi (op, mode, n);
2969 narg0 = builtin_save_expr (arg0);
2971 /* If the exponent is not integer valued, check if it is half of an integer.
2972 In this case we can expand to sqrt (x) * x**(n/2). */
2973 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2974 if (fn != NULL_TREE)
2976 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2977 n = real_to_integer (&c2);
2978 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2979 if (real_identical (&c2, &cint)
2980 && ((flag_unsafe_math_optimizations
2981 && optimize_insn_for_speed_p ()
2982 && powi_cost (n/2) <= POWI_MAX_MULTS)
2983 /* Even the c==0.5 case cannot be done unconditionally
2984 when we need to preserve signed zeros, as
2985 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
2986 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)))
2988 tree call_expr = build_call_nofold (fn, 1, narg0);
2989 /* Use expand_expr in case the newly built call expression
2990 was folded to a non-call. */
2991 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2994 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2995 op2 = force_reg (mode, op2);
2996 op2 = expand_powi (op2, mode, abs (n / 2));
2997 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2998 0, OPTAB_LIB_WIDEN);
2999 /* If the original exponent was negative, reciprocate the
3002 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3003 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3009 /* Try if the exponent is a third of an integer. In this case
3010 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3011 different from pow (x, 1./3.) due to rounding and behavior
3012 with negative x we need to constrain this transformation to
3013 unsafe math and positive x or finite math. */
3014 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3016 && flag_unsafe_math_optimizations
3017 && (tree_expr_nonnegative_p (arg0)
3018 || !HONOR_NANS (mode)))
3020 REAL_VALUE_TYPE dconst3;
3021 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3022 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3023 real_round (&c2, mode, &c2);
3024 n = real_to_integer (&c2);
3025 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3026 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3027 real_convert (&c2, mode, &c2);
3028 if (real_identical (&c2, &c)
3029 && ((optimize_insn_for_speed_p ()
3030 && powi_cost (n/3) <= POWI_MAX_MULTS)
3033 tree call_expr = build_call_nofold (fn, 1,narg0);
3034 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3035 if (abs (n) % 3 == 2)
3036 op = expand_simple_binop (mode, MULT, op, op, op,
3037 0, OPTAB_LIB_WIDEN);
3040 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3041 op2 = force_reg (mode, op2);
3042 op2 = expand_powi (op2, mode, abs (n / 3));
3043 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3044 0, OPTAB_LIB_WIDEN);
3045 /* If the original exponent was negative, reciprocate the
3048 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3049 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3055 /* Fall back to optab expansion. */
3056 return expand_builtin_mathfn_2 (exp, target, subtarget);
3059 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3060 a normal call should be emitted rather than expanding the function
3061 in-line. EXP is the expression that is a call to the builtin
3062 function; if convenient, the result should be placed in TARGET. */
3065 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3069 enum machine_mode mode;
3070 enum machine_mode mode2;
3072 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3075 arg0 = CALL_EXPR_ARG (exp, 0);
3076 arg1 = CALL_EXPR_ARG (exp, 1);
3077 mode = TYPE_MODE (TREE_TYPE (exp));
3079 /* Handle constant power. */
3081 if (TREE_CODE (arg1) == INTEGER_CST
3082 && !TREE_OVERFLOW (arg1))
3084 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3086 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3087 Otherwise, check the number of multiplications required. */
3088 if ((TREE_INT_CST_HIGH (arg1) == 0
3089 || TREE_INT_CST_HIGH (arg1) == -1)
3090 && ((n >= -1 && n <= 2)
3091 || (optimize_insn_for_speed_p ()
3092 && powi_cost (n) <= POWI_MAX_MULTS)))
3094 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3095 op0 = force_reg (mode, op0);
3096 return expand_powi (op0, mode, n);
3100 /* Emit a libcall to libgcc. */
3102 /* Mode of the 2nd argument must match that of an int. */
3103 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3105 if (target == NULL_RTX)
3106 target = gen_reg_rtx (mode);
3108 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3109 if (GET_MODE (op0) != mode)
3110 op0 = convert_to_mode (mode, op0, 0);
3111 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3112 if (GET_MODE (op1) != mode2)
3113 op1 = convert_to_mode (mode2, op1, 0);
3115 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3116 target, LCT_CONST, mode, 2,
3117 op0, mode, op1, mode2);
3122 /* Expand expression EXP which is a call to the strlen builtin. Return
3123 NULL_RTX if we failed the caller should emit a normal call, otherwise
3124 try to get the result in TARGET, if convenient. */
3127 expand_builtin_strlen (tree exp, rtx target,
3128 enum machine_mode target_mode)
3130 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3136 tree src = CALL_EXPR_ARG (exp, 0);
3137 rtx result, src_reg, char_rtx, before_strlen;
3138 enum machine_mode insn_mode = target_mode, char_mode;
3139 enum insn_code icode = CODE_FOR_nothing;
3142 /* If the length can be computed at compile-time, return it. */
3143 len = c_strlen (src, 0);
3145 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3147 /* If the length can be computed at compile-time and is constant
3148 integer, but there are side-effects in src, evaluate
3149 src for side-effects, then return len.
3150 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3151 can be optimized into: i++; x = 3; */
3152 len = c_strlen (src, 1);
3153 if (len && TREE_CODE (len) == INTEGER_CST)
3155 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3156 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3159 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3161 /* If SRC is not a pointer type, don't do this operation inline. */
3165 /* Bail out if we can't compute strlen in the right mode. */
3166 while (insn_mode != VOIDmode)
3168 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3169 if (icode != CODE_FOR_nothing)
3172 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3174 if (insn_mode == VOIDmode)
3177 /* Make a place to write the result of the instruction. */
3181 && GET_MODE (result) == insn_mode
3182 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3183 result = gen_reg_rtx (insn_mode);
3185 /* Make a place to hold the source address. We will not expand
3186 the actual source until we are sure that the expansion will
3187 not fail -- there are trees that cannot be expanded twice. */
3188 src_reg = gen_reg_rtx (Pmode);
3190 /* Mark the beginning of the strlen sequence so we can emit the
3191 source operand later. */
3192 before_strlen = get_last_insn ();
3194 char_rtx = const0_rtx;
3195 char_mode = insn_data[(int) icode].operand[2].mode;
3196 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3198 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3200 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3201 char_rtx, GEN_INT (align));
3206 /* Now that we are assured of success, expand the source. */
3208 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3210 emit_move_insn (src_reg, pat);
3215 emit_insn_after (pat, before_strlen);
3217 emit_insn_before (pat, get_insns ());
3219 /* Return the value in the proper mode for this function. */
3220 if (GET_MODE (result) == target_mode)
3222 else if (target != 0)
3223 convert_move (target, result, 0);
3225 target = convert_to_mode (target_mode, result, 0);
3231 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3232 bytes from constant string DATA + OFFSET and return it as target
3236 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3237 enum machine_mode mode)
3239 const char *str = (const char *) data;
3241 gcc_assert (offset >= 0
3242 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3243 <= strlen (str) + 1));
3245 return c_readstr (str + offset, mode);
3248 /* Expand a call EXP to the memcpy builtin.
3249 Return NULL_RTX if we failed, the caller should emit a normal call,
3250 otherwise try to get the result in TARGET, if convenient (and in
3251 mode MODE if that's convenient). */
3254 expand_builtin_memcpy (tree exp, rtx target)
3256 if (!validate_arglist (exp,
3257 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3261 tree dest = CALL_EXPR_ARG (exp, 0);
3262 tree src = CALL_EXPR_ARG (exp, 1);
3263 tree len = CALL_EXPR_ARG (exp, 2);
3264 const char *src_str;
3265 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3266 unsigned int dest_align
3267 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3268 rtx dest_mem, src_mem, dest_addr, len_rtx;
3269 HOST_WIDE_INT expected_size = -1;
3270 unsigned int expected_align = 0;
3272 /* If DEST is not a pointer type, call the normal function. */
3273 if (dest_align == 0)
3276 /* If either SRC is not a pointer type, don't do this
3277 operation in-line. */
3281 if (currently_expanding_gimple_stmt)
3282 stringop_block_profile (currently_expanding_gimple_stmt,
3283 &expected_align, &expected_size);
3285 if (expected_align < dest_align)
3286 expected_align = dest_align;
3287 dest_mem = get_memory_rtx (dest, len);
3288 set_mem_align (dest_mem, dest_align);
3289 len_rtx = expand_normal (len);
3290 src_str = c_getstr (src);
3292 /* If SRC is a string constant and block move would be done
3293 by pieces, we can avoid loading the string from memory
3294 and only stored the computed constants. */
3296 && CONST_INT_P (len_rtx)
3297 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3298 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3299 CONST_CAST (char *, src_str),
3302 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3303 builtin_memcpy_read_str,
3304 CONST_CAST (char *, src_str),
3305 dest_align, false, 0);
3306 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3307 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3311 src_mem = get_memory_rtx (src, len);
3312 set_mem_align (src_mem, src_align);
3314 /* Copy word part most expediently. */
3315 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3316 CALL_EXPR_TAILCALL (exp)
3317 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3318 expected_align, expected_size);
3322 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3323 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3329 /* Expand a call EXP to the mempcpy builtin.
3330 Return NULL_RTX if we failed; the caller should emit a normal call,
3331 otherwise try to get the result in TARGET, if convenient (and in
3332 mode MODE if that's convenient). If ENDP is 0 return the
3333 destination pointer, if ENDP is 1 return the end pointer ala
3334 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3338 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3340 if (!validate_arglist (exp,
3341 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3345 tree dest = CALL_EXPR_ARG (exp, 0);
3346 tree src = CALL_EXPR_ARG (exp, 1);
3347 tree len = CALL_EXPR_ARG (exp, 2);
3348 return expand_builtin_mempcpy_args (dest, src, len,
3349 target, mode, /*endp=*/ 1);
3353 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3354 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3355 so that this can also be called without constructing an actual CALL_EXPR.
3356 The other arguments and return value are the same as for
3357 expand_builtin_mempcpy. */
3360 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3361 rtx target, enum machine_mode mode, int endp)
3363 /* If return value is ignored, transform mempcpy into memcpy. */
3364 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3366 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3367 tree result = build_call_nofold (fn, 3, dest, src, len);
3368 return expand_expr (result, target, mode, EXPAND_NORMAL);
3372 const char *src_str;
3373 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3374 unsigned int dest_align
3375 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3376 rtx dest_mem, src_mem, len_rtx;
3378 /* If either SRC or DEST is not a pointer type, don't do this
3379 operation in-line. */
3380 if (dest_align == 0 || src_align == 0)
3383 /* If LEN is not constant, call the normal function. */
3384 if (! host_integerp (len, 1))
3387 len_rtx = expand_normal (len);
3388 src_str = c_getstr (src);
3390 /* If SRC is a string constant and block move would be done
3391 by pieces, we can avoid loading the string from memory
3392 and only stored the computed constants. */
3394 && CONST_INT_P (len_rtx)
3395 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3396 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3397 CONST_CAST (char *, src_str),
3400 dest_mem = get_memory_rtx (dest, len);
3401 set_mem_align (dest_mem, dest_align);
3402 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3403 builtin_memcpy_read_str,
3404 CONST_CAST (char *, src_str),
3405 dest_align, false, endp);
3406 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3407 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3411 if (CONST_INT_P (len_rtx)
3412 && can_move_by_pieces (INTVAL (len_rtx),
3413 MIN (dest_align, src_align)))
3415 dest_mem = get_memory_rtx (dest, len);
3416 set_mem_align (dest_mem, dest_align);
3417 src_mem = get_memory_rtx (src, len);
3418 set_mem_align (src_mem, src_align);
3419 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3420 MIN (dest_align, src_align), endp);
3421 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3422 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3431 # define HAVE_movstr 0
3432 # define CODE_FOR_movstr CODE_FOR_nothing
3435 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3436 we failed, the caller should emit a normal call, otherwise try to
3437 get the result in TARGET, if convenient. If ENDP is 0 return the
3438 destination pointer, if ENDP is 1 return the end pointer ala
3439 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3443 expand_movstr (tree dest, tree src, rtx target, int endp)
3449 const struct insn_data * data;
3454 dest_mem = get_memory_rtx (dest, NULL);
3455 src_mem = get_memory_rtx (src, NULL);
3458 target = force_reg (Pmode, XEXP (dest_mem, 0));
3459 dest_mem = replace_equiv_address (dest_mem, target);
3460 end = gen_reg_rtx (Pmode);
3464 if (target == 0 || target == const0_rtx)
3466 end = gen_reg_rtx (Pmode);
3474 data = insn_data + CODE_FOR_movstr;
3476 if (data->operand[0].mode != VOIDmode)
3477 end = gen_lowpart (data->operand[0].mode, end);
3479 insn = data->genfun (end, dest_mem, src_mem);
3485 /* movstr is supposed to set end to the address of the NUL
3486 terminator. If the caller requested a mempcpy-like return value,
3488 if (endp == 1 && target != const0_rtx)
3490 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3491 emit_move_insn (target, force_operand (tem, NULL_RTX));
3497 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3498 NULL_RTX if we failed the caller should emit a normal call, otherwise
3499 try to get the result in TARGET, if convenient (and in mode MODE if that's
3503 expand_builtin_strcpy (tree exp, rtx target)
3505 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3507 tree dest = CALL_EXPR_ARG (exp, 0);
3508 tree src = CALL_EXPR_ARG (exp, 1);
3509 return expand_builtin_strcpy_args (dest, src, target);
3514 /* Helper function to do the actual work for expand_builtin_strcpy. The
3515 arguments to the builtin_strcpy call DEST and SRC are broken out
3516 so that this can also be called without constructing an actual CALL_EXPR.
3517 The other arguments and return value are the same as for
3518 expand_builtin_strcpy. */
3521 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3523 return expand_movstr (dest, src, target, /*endp=*/0);
3526 /* Expand a call EXP to the stpcpy builtin.
3527 Return NULL_RTX if we failed the caller should emit a normal call,
3528 otherwise try to get the result in TARGET, if convenient (and in
3529 mode MODE if that's convenient). */
3532 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3535 location_t loc = EXPR_LOCATION (exp);
3537 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3540 dst = CALL_EXPR_ARG (exp, 0);
3541 src = CALL_EXPR_ARG (exp, 1);
3543 /* If return value is ignored, transform stpcpy into strcpy. */
3544 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3546 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3547 tree result = build_call_nofold (fn, 2, dst, src);
3548 return expand_expr (result, target, mode, EXPAND_NORMAL);
3555 /* Ensure we get an actual string whose length can be evaluated at
3556 compile-time, not an expression containing a string. This is
3557 because the latter will potentially produce pessimized code
3558 when used to produce the return value. */
3559 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3560 return expand_movstr (dst, src, target, /*endp=*/2);
3562 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3563 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3564 target, mode, /*endp=*/2);
3569 if (TREE_CODE (len) == INTEGER_CST)
3571 rtx len_rtx = expand_normal (len);
3573 if (CONST_INT_P (len_rtx))
3575 ret = expand_builtin_strcpy_args (dst, src, target);
3581 if (mode != VOIDmode)
3582 target = gen_reg_rtx (mode);
3584 target = gen_reg_rtx (GET_MODE (ret));
3586 if (GET_MODE (target) != GET_MODE (ret))
3587 ret = gen_lowpart (GET_MODE (target), ret);
3589 ret = plus_constant (ret, INTVAL (len_rtx));
3590 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3598 return expand_movstr (dst, src, target, /*endp=*/2);
3602 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3603 bytes from constant string DATA + OFFSET and return it as target
3607 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3608 enum machine_mode mode)
3610 const char *str = (const char *) data;
3612 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3615 return c_readstr (str + offset, mode);
3618 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3619 NULL_RTX if we failed the caller should emit a normal call. */
3622 expand_builtin_strncpy (tree exp, rtx target)
3624 location_t loc = EXPR_LOCATION (exp);
3626 if (validate_arglist (exp,
3627 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3629 tree dest = CALL_EXPR_ARG (exp, 0);
3630 tree src = CALL_EXPR_ARG (exp, 1);
3631 tree len = CALL_EXPR_ARG (exp, 2);
3632 tree slen = c_strlen (src, 1);
3634 /* We must be passed a constant len and src parameter. */
3635 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3638 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3640 /* We're required to pad with trailing zeros if the requested
3641 len is greater than strlen(s2)+1. In that case try to
3642 use store_by_pieces, if it fails, punt. */
3643 if (tree_int_cst_lt (slen, len))
3645 unsigned int dest_align
3646 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3647 const char *p = c_getstr (src);
3650 if (!p || dest_align == 0 || !host_integerp (len, 1)
3651 || !can_store_by_pieces (tree_low_cst (len, 1),
3652 builtin_strncpy_read_str,
3653 CONST_CAST (char *, p),
3657 dest_mem = get_memory_rtx (dest, len);
3658 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3659 builtin_strncpy_read_str,
3660 CONST_CAST (char *, p), dest_align, false, 0);
3661 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3662 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3669 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3670 bytes from constant string DATA + OFFSET and return it as target
3674 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3675 enum machine_mode mode)
3677 const char *c = (const char *) data;
3678 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3680 memset (p, *c, GET_MODE_SIZE (mode));
3682 return c_readstr (p, mode);
3685 /* Callback routine for store_by_pieces. Return the RTL of a register
3686 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3687 char value given in the RTL register data. For example, if mode is
3688 4 bytes wide, return the RTL for 0x01010101*data. */
3691 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3692 enum machine_mode mode)
3698 size = GET_MODE_SIZE (mode);
3702 p = XALLOCAVEC (char, size);
3703 memset (p, 1, size);
3704 coeff = c_readstr (p, mode);
3706 target = convert_to_mode (mode, (rtx) data, 1);
3707 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3708 return force_reg (mode, target);
3711 /* Expand expression EXP, which is a call to the memset builtin. Return
3712 NULL_RTX if we failed the caller should emit a normal call, otherwise
3713 try to get the result in TARGET, if convenient (and in mode MODE if that's
3717 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3719 if (!validate_arglist (exp,
3720 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3724 tree dest = CALL_EXPR_ARG (exp, 0);
3725 tree val = CALL_EXPR_ARG (exp, 1);
3726 tree len = CALL_EXPR_ARG (exp, 2);
3727 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3731 /* Helper function to do the actual work for expand_builtin_memset. The
3732 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3733 so that this can also be called without constructing an actual CALL_EXPR.
3734 The other arguments and return value are the same as for
3735 expand_builtin_memset. */
3738 expand_builtin_memset_args (tree dest, tree val, tree len,
3739 rtx target, enum machine_mode mode, tree orig_exp)
3742 enum built_in_function fcode;
3744 unsigned int dest_align;
3745 rtx dest_mem, dest_addr, len_rtx;
3746 HOST_WIDE_INT expected_size = -1;
3747 unsigned int expected_align = 0;
3749 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3751 /* If DEST is not a pointer type, don't do this operation in-line. */
3752 if (dest_align == 0)
3755 if (currently_expanding_gimple_stmt)
3756 stringop_block_profile (currently_expanding_gimple_stmt,
3757 &expected_align, &expected_size);
3759 if (expected_align < dest_align)
3760 expected_align = dest_align;
3762 /* If the LEN parameter is zero, return DEST. */
3763 if (integer_zerop (len))
3765 /* Evaluate and ignore VAL in case it has side-effects. */
3766 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3767 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3770 /* Stabilize the arguments in case we fail. */
3771 dest = builtin_save_expr (dest);
3772 val = builtin_save_expr (val);
3773 len = builtin_save_expr (len);
3775 len_rtx = expand_normal (len);
3776 dest_mem = get_memory_rtx (dest, len);
3778 if (TREE_CODE (val) != INTEGER_CST)
3782 val_rtx = expand_normal (val);
3783 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3786 /* Assume that we can memset by pieces if we can store
3787 * the coefficients by pieces (in the required modes).
3788 * We can't pass builtin_memset_gen_str as that emits RTL. */
3790 if (host_integerp (len, 1)
3791 && can_store_by_pieces (tree_low_cst (len, 1),
3792 builtin_memset_read_str, &c, dest_align,
3795 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3797 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3798 builtin_memset_gen_str, val_rtx, dest_align,
3801 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3802 dest_align, expected_align,
3806 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3807 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3811 if (target_char_cast (val, &c))
3816 if (host_integerp (len, 1)
3817 && can_store_by_pieces (tree_low_cst (len, 1),
3818 builtin_memset_read_str, &c, dest_align,
3820 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3821 builtin_memset_read_str, &c, dest_align, true, 0);
3822 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3823 dest_align, expected_align,
3827 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3828 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3832 set_mem_align (dest_mem, dest_align);
3833 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3834 CALL_EXPR_TAILCALL (orig_exp)
3835 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3836 expected_align, expected_size);
3840 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3841 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3847 fndecl = get_callee_fndecl (orig_exp);
3848 fcode = DECL_FUNCTION_CODE (fndecl);
3849 if (fcode == BUILT_IN_MEMSET)
3850 fn = build_call_nofold (fndecl, 3, dest, val, len);
3851 else if (fcode == BUILT_IN_BZERO)
3852 fn = build_call_nofold (fndecl, 2, dest, len);
3855 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3856 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3857 return expand_call (fn, target, target == const0_rtx);
3860 /* Expand expression EXP, which is a call to the bzero builtin. Return
3861 NULL_RTX if we failed the caller should emit a normal call. */
3864 expand_builtin_bzero (tree exp)
3867 location_t loc = EXPR_LOCATION (exp);
3869 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3872 dest = CALL_EXPR_ARG (exp, 0);
3873 size = CALL_EXPR_ARG (exp, 1);
3875 /* New argument list transforming bzero(ptr x, int y) to
3876 memset(ptr x, int 0, size_t y). This is done this way
3877 so that if it isn't expanded inline, we fallback to
3878 calling bzero instead of memset. */
3880 return expand_builtin_memset_args (dest, integer_zero_node,
3881 fold_convert_loc (loc, sizetype, size),
3882 const0_rtx, VOIDmode, exp);
3885 /* Expand expression EXP, which is a call to the memcmp built-in function.
3886 Return NULL_RTX if we failed and the
3887 caller should emit a normal call, otherwise try to get the result in
3888 TARGET, if convenient (and in mode MODE, if that's convenient). */
3891 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3892 ATTRIBUTE_UNUSED enum machine_mode mode)
3894 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3896 if (!validate_arglist (exp,
3897 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3900 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3902 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3905 tree arg1 = CALL_EXPR_ARG (exp, 0);
3906 tree arg2 = CALL_EXPR_ARG (exp, 1);
3907 tree len = CALL_EXPR_ARG (exp, 2);
3910 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3912 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3913 enum machine_mode insn_mode;
3915 #ifdef HAVE_cmpmemsi
3917 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3920 #ifdef HAVE_cmpstrnsi
3922 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3927 /* If we don't have POINTER_TYPE, call the function. */
3928 if (arg1_align == 0 || arg2_align == 0)
3931 /* Make a place to write the result of the instruction. */
3934 && REG_P (result) && GET_MODE (result) == insn_mode
3935 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3936 result = gen_reg_rtx (insn_mode);
3938 arg1_rtx = get_memory_rtx (arg1, len);
3939 arg2_rtx = get_memory_rtx (arg2, len);
3940 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3942 /* Set MEM_SIZE as appropriate. */
3943 if (CONST_INT_P (arg3_rtx))
3945 set_mem_size (arg1_rtx, arg3_rtx);
3946 set_mem_size (arg2_rtx, arg3_rtx);
3949 #ifdef HAVE_cmpmemsi
3951 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3952 GEN_INT (MIN (arg1_align, arg2_align)));
3955 #ifdef HAVE_cmpstrnsi
3957 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3958 GEN_INT (MIN (arg1_align, arg2_align)));
3966 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3967 TYPE_MODE (integer_type_node), 3,
3968 XEXP (arg1_rtx, 0), Pmode,
3969 XEXP (arg2_rtx, 0), Pmode,
3970 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3971 TYPE_UNSIGNED (sizetype)),
3972 TYPE_MODE (sizetype));
3974 /* Return the value in the proper mode for this function. */
3975 mode = TYPE_MODE (TREE_TYPE (exp));
3976 if (GET_MODE (result) == mode)
3978 else if (target != 0)
3980 convert_move (target, result, 0);
3984 return convert_to_mode (mode, result, 0);
3991 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3992 if we failed the caller should emit a normal call, otherwise try to get
3993 the result in TARGET, if convenient. */
3996 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3998 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4001 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4002 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4003 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4005 rtx arg1_rtx, arg2_rtx;
4006 rtx result, insn = NULL_RTX;
4008 tree arg1 = CALL_EXPR_ARG (exp, 0);
4009 tree arg2 = CALL_EXPR_ARG (exp, 1);
4012 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4014 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4016 /* If we don't have POINTER_TYPE, call the function. */
4017 if (arg1_align == 0 || arg2_align == 0)
4020 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4021 arg1 = builtin_save_expr (arg1);
4022 arg2 = builtin_save_expr (arg2);
4024 arg1_rtx = get_memory_rtx (arg1, NULL);
4025 arg2_rtx = get_memory_rtx (arg2, NULL);
4027 #ifdef HAVE_cmpstrsi
4028 /* Try to call cmpstrsi. */
4031 enum machine_mode insn_mode
4032 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4034 /* Make a place to write the result of the instruction. */
4037 && REG_P (result) && GET_MODE (result) == insn_mode
4038 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4039 result = gen_reg_rtx (insn_mode);
4041 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4042 GEN_INT (MIN (arg1_align, arg2_align)));
4045 #ifdef HAVE_cmpstrnsi
4046 /* Try to determine at least one length and call cmpstrnsi. */
4047 if (!insn && HAVE_cmpstrnsi)
4052 enum machine_mode insn_mode
4053 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4054 tree len1 = c_strlen (arg1, 1);
4055 tree len2 = c_strlen (arg2, 1);
4058 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4060 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4062 /* If we don't have a constant length for the first, use the length
4063 of the second, if we know it. We don't require a constant for
4064 this case; some cost analysis could be done if both are available
4065 but neither is constant. For now, assume they're equally cheap,
4066 unless one has side effects. If both strings have constant lengths,
4073 else if (TREE_SIDE_EFFECTS (len1))
4075 else if (TREE_SIDE_EFFECTS (len2))
4077 else if (TREE_CODE (len1) != INTEGER_CST)
4079 else if (TREE_CODE (len2) != INTEGER_CST)
4081 else if (tree_int_cst_lt (len1, len2))
4086 /* If both arguments have side effects, we cannot optimize. */
4087 if (!len || TREE_SIDE_EFFECTS (len))
4090 arg3_rtx = expand_normal (len);
4092 /* Make a place to write the result of the instruction. */
4095 && REG_P (result) && GET_MODE (result) == insn_mode
4096 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4097 result = gen_reg_rtx (insn_mode);
4099 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4100 GEN_INT (MIN (arg1_align, arg2_align)));
4106 enum machine_mode mode;
4109 /* Return the value in the proper mode for this function. */
4110 mode = TYPE_MODE (TREE_TYPE (exp));
4111 if (GET_MODE (result) == mode)
4114 return convert_to_mode (mode, result, 0);
4115 convert_move (target, result, 0);
4119 /* Expand the library call ourselves using a stabilized argument
4120 list to avoid re-evaluating the function's arguments twice. */
4121 #ifdef HAVE_cmpstrnsi
4124 fndecl = get_callee_fndecl (exp);
4125 fn = build_call_nofold (fndecl, 2, arg1, arg2);
4126 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4127 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4128 return expand_call (fn, target, target == const0_rtx);
4134 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4135 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4136 the result in TARGET, if convenient. */
4139 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4140 ATTRIBUTE_UNUSED enum machine_mode mode)
4142 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4144 if (!validate_arglist (exp,
4145 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4148 /* If c_strlen can determine an expression for one of the string
4149 lengths, and it doesn't have side effects, then emit cmpstrnsi
4150 using length MIN(strlen(string)+1, arg3). */
4151 #ifdef HAVE_cmpstrnsi
4154 tree len, len1, len2;
4155 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4158 tree arg1 = CALL_EXPR_ARG (exp, 0);
4159 tree arg2 = CALL_EXPR_ARG (exp, 1);
4160 tree arg3 = CALL_EXPR_ARG (exp, 2);
4163 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4165 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4166 enum machine_mode insn_mode
4167 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4169 len1 = c_strlen (arg1, 1);
4170 len2 = c_strlen (arg2, 1);
4173 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4175 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4177 /* If we don't have a constant length for the first, use the length
4178 of the second, if we know it. We don't require a constant for
4179 this case; some cost analysis could be done if both are available
4180 but neither is constant. For now, assume they're equally cheap,
4181 unless one has side effects. If both strings have constant lengths,
4188 else if (TREE_SIDE_EFFECTS (len1))
4190 else if (TREE_SIDE_EFFECTS (len2))
4192 else if (TREE_CODE (len1) != INTEGER_CST)
4194 else if (TREE_CODE (len2) != INTEGER_CST)
4196 else if (tree_int_cst_lt (len1, len2))
4201 /* If both arguments have side effects, we cannot optimize. */
4202 if (!len || TREE_SIDE_EFFECTS (len))
4205 /* The actual new length parameter is MIN(len,arg3). */
4206 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4207 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4209 /* If we don't have POINTER_TYPE, call the function. */
4210 if (arg1_align == 0 || arg2_align == 0)
4213 /* Make a place to write the result of the instruction. */
4216 && REG_P (result) && GET_MODE (result) == insn_mode
4217 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4218 result = gen_reg_rtx (insn_mode);
4220 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4221 arg1 = builtin_save_expr (arg1);
4222 arg2 = builtin_save_expr (arg2);
4223 len = builtin_save_expr (len);
4225 arg1_rtx = get_memory_rtx (arg1, len);
4226 arg2_rtx = get_memory_rtx (arg2, len);
4227 arg3_rtx = expand_normal (len);
4228 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4229 GEN_INT (MIN (arg1_align, arg2_align)));
4234 /* Return the value in the proper mode for this function. */
4235 mode = TYPE_MODE (TREE_TYPE (exp));
4236 if (GET_MODE (result) == mode)
4239 return convert_to_mode (mode, result, 0);
4240 convert_move (target, result, 0);
4244 /* Expand the library call ourselves using a stabilized argument
4245 list to avoid re-evaluating the function's arguments twice. */
4246 fndecl = get_callee_fndecl (exp);
4247 fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
4248 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4249 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4250 return expand_call (fn, target, target == const0_rtx);
4256 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4257 if that's convenient. */
4260 expand_builtin_saveregs (void)
4264 /* Don't do __builtin_saveregs more than once in a function.
4265 Save the result of the first call and reuse it. */
4266 if (saveregs_value != 0)
4267 return saveregs_value;
4269 /* When this function is called, it means that registers must be
4270 saved on entry to this function. So we migrate the call to the
4271 first insn of this function. */
4275 /* Do whatever the machine needs done in this case. */
4276 val = targetm.calls.expand_builtin_saveregs ();
4281 saveregs_value = val;
4283 /* Put the insns after the NOTE that starts the function. If this
4284 is inside a start_sequence, make the outer-level insn chain current, so
4285 the code is placed at the start of the function. */
4286 push_topmost_sequence ();
4287 emit_insn_after (seq, entry_of_function ());
4288 pop_topmost_sequence ();
4293 /* __builtin_args_info (N) returns word N of the arg space info
4294 for the current function. The number and meanings of words
4295 is controlled by the definition of CUMULATIVE_ARGS. */
4298 expand_builtin_args_info (tree exp)
4300 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4301 int *word_ptr = (int *) &crtl->args.info;
4303 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4305 if (call_expr_nargs (exp) != 0)
4307 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4308 error ("argument of %<__builtin_args_info%> must be constant");
4311 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4313 if (wordnum < 0 || wordnum >= nwords)
4314 error ("argument of %<__builtin_args_info%> out of range");
4316 return GEN_INT (word_ptr[wordnum]);
4320 error ("missing argument in %<__builtin_args_info%>");
4325 /* Expand a call to __builtin_next_arg. */
4328 expand_builtin_next_arg (void)
4330 /* Checking arguments is already done in fold_builtin_next_arg
4331 that must be called before this function. */
4332 return expand_binop (ptr_mode, add_optab,
4333 crtl->args.internal_arg_pointer,
4334 crtl->args.arg_offset_rtx,
4335 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4338 /* Make it easier for the backends by protecting the valist argument
4339 from multiple evaluations. */
4342 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4344 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4346 gcc_assert (vatype != NULL_TREE);
4348 if (TREE_CODE (vatype) == ARRAY_TYPE)
4350 if (TREE_SIDE_EFFECTS (valist))
4351 valist = save_expr (valist);
4353 /* For this case, the backends will be expecting a pointer to
4354 vatype, but it's possible we've actually been given an array
4355 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4357 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4359 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4360 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4369 if (! TREE_SIDE_EFFECTS (valist))
4372 pt = build_pointer_type (vatype);
4373 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4374 TREE_SIDE_EFFECTS (valist) = 1;
4377 if (TREE_SIDE_EFFECTS (valist))
4378 valist = save_expr (valist);
4379 valist = build_fold_indirect_ref_loc (loc, valist);
4385 /* The "standard" definition of va_list is void*. */
4388 std_build_builtin_va_list (void)
4390 return ptr_type_node;
4393 /* The "standard" abi va_list is va_list_type_node. */
4396 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4398 return va_list_type_node;
4401 /* The "standard" type of va_list is va_list_type_node. */
4404 std_canonical_va_list_type (tree type)
4408 if (INDIRECT_REF_P (type))
4409 type = TREE_TYPE (type);
4410 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4411 type = TREE_TYPE (type);
4412 wtype = va_list_type_node;
4414 /* Treat structure va_list types. */
4415 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4416 htype = TREE_TYPE (htype);
4417 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4419 /* If va_list is an array type, the argument may have decayed
4420 to a pointer type, e.g. by being passed to another function.
4421 In that case, unwrap both types so that we can compare the
4422 underlying records. */
4423 if (TREE_CODE (htype) == ARRAY_TYPE
4424 || POINTER_TYPE_P (htype))
4426 wtype = TREE_TYPE (wtype);
4427 htype = TREE_TYPE (htype);
4430 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4431 return va_list_type_node;
4436 /* The "standard" implementation of va_start: just assign `nextarg' to
4440 std_expand_builtin_va_start (tree valist, rtx nextarg)
4442 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4443 convert_move (va_r, nextarg, 0);
4446 /* Expand EXP, a call to __builtin_va_start. */
4449 expand_builtin_va_start (tree exp)
4453 location_t loc = EXPR_LOCATION (exp);
4455 if (call_expr_nargs (exp) < 2)
4457 error_at (loc, "too few arguments to function %<va_start%>");
4461 if (fold_builtin_next_arg (exp, true))
4464 nextarg = expand_builtin_next_arg ();
4465 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4467 if (targetm.expand_builtin_va_start)
4468 targetm.expand_builtin_va_start (valist, nextarg);
4470 std_expand_builtin_va_start (valist, nextarg);
4475 /* The "standard" implementation of va_arg: read the value from the
4476 current (padded) address and increment by the (padded) size. */
4479 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4482 tree addr, t, type_size, rounded_size, valist_tmp;
4483 unsigned HOST_WIDE_INT align, boundary;
4486 #ifdef ARGS_GROW_DOWNWARD
4487 /* All of the alignment and movement below is for args-grow-up machines.
4488 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4489 implement their own specialized gimplify_va_arg_expr routines. */
4493 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4495 type = build_pointer_type (type);
4497 align = PARM_BOUNDARY / BITS_PER_UNIT;
4498 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4500 /* When we align parameter on stack for caller, if the parameter
4501 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4502 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4503 here with caller. */
4504 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4505 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4507 boundary /= BITS_PER_UNIT;
4509 /* Hoist the valist value into a temporary for the moment. */
4510 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4512 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4513 requires greater alignment, we must perform dynamic alignment. */
4514 if (boundary > align
4515 && !integer_zerop (TYPE_SIZE (type)))
4517 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4518 fold_build2 (POINTER_PLUS_EXPR,
4520 valist_tmp, size_int (boundary - 1)));
4521 gimplify_and_add (t, pre_p);
4523 t = fold_convert (sizetype, valist_tmp);
4524 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4525 fold_convert (TREE_TYPE (valist),
4526 fold_build2 (BIT_AND_EXPR, sizetype, t,
4527 size_int (-boundary))));
4528 gimplify_and_add (t, pre_p);
4533 /* If the actual alignment is less than the alignment of the type,
4534 adjust the type accordingly so that we don't assume strict alignment
4535 when dereferencing the pointer. */
4536 boundary *= BITS_PER_UNIT;
4537 if (boundary < TYPE_ALIGN (type))
4539 type = build_variant_type_copy (type);
4540 TYPE_ALIGN (type) = boundary;
4543 /* Compute the rounded size of the type. */
4544 type_size = size_in_bytes (type);
4545 rounded_size = round_up (type_size, align);
4547 /* Reduce rounded_size so it's sharable with the postqueue. */
4548 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4552 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4554 /* Small args are padded downward. */
4555 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4556 rounded_size, size_int (align));
4557 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4558 size_binop (MINUS_EXPR, rounded_size, type_size));
4559 addr = fold_build2 (POINTER_PLUS_EXPR,
4560 TREE_TYPE (addr), addr, t);
4563 /* Compute new value for AP. */
4564 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4565 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4566 gimplify_and_add (t, pre_p);
4568 addr = fold_convert (build_pointer_type (type), addr);
4571 addr = build_va_arg_indirect_ref (addr);
4573 return build_va_arg_indirect_ref (addr);
4576 /* Build an indirect-ref expression over the given TREE, which represents a
4577 piece of a va_arg() expansion. */
4579 build_va_arg_indirect_ref (tree addr)
4581 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4583 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4589 /* Return a dummy expression of type TYPE in order to keep going after an
4593 dummy_object (tree type)
4595 tree t = build_int_cst (build_pointer_type (type), 0);
4596 return build1 (INDIRECT_REF, type, t);
4599 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4600 builtin function, but a very special sort of operator. */
4602 enum gimplify_status
4603 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4605 tree promoted_type, have_va_type;
4606 tree valist = TREE_OPERAND (*expr_p, 0);
4607 tree type = TREE_TYPE (*expr_p);
4609 location_t loc = EXPR_LOCATION (*expr_p);
4611 /* Verify that valist is of the proper type. */
4612 have_va_type = TREE_TYPE (valist);
4613 if (have_va_type == error_mark_node)
4615 have_va_type = targetm.canonical_va_list_type (have_va_type);
4617 if (have_va_type == NULL_TREE)
4619 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4623 /* Generate a diagnostic for requesting data of a type that cannot
4624 be passed through `...' due to type promotion at the call site. */
4625 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4628 static bool gave_help;
4631 /* Unfortunately, this is merely undefined, rather than a constraint
4632 violation, so we cannot make this an error. If this call is never
4633 executed, the program is still strictly conforming. */
4634 warned = warning_at (loc, 0,
4635 "%qT is promoted to %qT when passed through %<...%>",
4636 type, promoted_type);
4637 if (!gave_help && warned)
4640 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4641 promoted_type, type);
4644 /* We can, however, treat "undefined" any way we please.
4645 Call abort to encourage the user to fix the program. */
4647 inform (loc, "if this code is reached, the program will abort");
4648 /* Before the abort, allow the evaluation of the va_list
4649 expression to exit or longjmp. */
4650 gimplify_and_add (valist, pre_p);
4651 t = build_call_expr_loc (loc,
4652 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4653 gimplify_and_add (t, pre_p);
4655 /* This is dead code, but go ahead and finish so that the
4656 mode of the result comes out right. */
4657 *expr_p = dummy_object (type);
4662 /* Make it easier for the backends by protecting the valist argument
4663 from multiple evaluations. */
4664 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4666 /* For this case, the backends will be expecting a pointer to
4667 TREE_TYPE (abi), but it's possible we've
4668 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4670 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4672 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4673 valist = fold_convert_loc (loc, p1,
4674 build_fold_addr_expr_loc (loc, valist));
4677 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4680 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4682 if (!targetm.gimplify_va_arg_expr)
4683 /* FIXME: Once most targets are converted we should merely
4684 assert this is non-null. */
4687 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4692 /* Expand EXP, a call to __builtin_va_end. */
4695 expand_builtin_va_end (tree exp)
4697 tree valist = CALL_EXPR_ARG (exp, 0);
4699 /* Evaluate for side effects, if needed. I hate macros that don't
4701 if (TREE_SIDE_EFFECTS (valist))
4702 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4707 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4708 builtin rather than just as an assignment in stdarg.h because of the
4709 nastiness of array-type va_list types. */
4712 expand_builtin_va_copy (tree exp)
4715 location_t loc = EXPR_LOCATION (exp);
4717 dst = CALL_EXPR_ARG (exp, 0);
4718 src = CALL_EXPR_ARG (exp, 1);
4720 dst = stabilize_va_list_loc (loc, dst, 1);
4721 src = stabilize_va_list_loc (loc, src, 0);
4723 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4725 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4727 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4728 TREE_SIDE_EFFECTS (t) = 1;
4729 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4733 rtx dstb, srcb, size;
4735 /* Evaluate to pointers. */
4736 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4737 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4738 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4739 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4741 dstb = convert_memory_address (Pmode, dstb);
4742 srcb = convert_memory_address (Pmode, srcb);
4744 /* "Dereference" to BLKmode memories. */
4745 dstb = gen_rtx_MEM (BLKmode, dstb);
4746 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4747 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4748 srcb = gen_rtx_MEM (BLKmode, srcb);
4749 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4750 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4753 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4759 /* Expand a call to one of the builtin functions __builtin_frame_address or
4760 __builtin_return_address. */
4763 expand_builtin_frame_address (tree fndecl, tree exp)
4765 /* The argument must be a nonnegative integer constant.
4766 It counts the number of frames to scan up the stack.
4767 The value is the return address saved in that frame. */
4768 if (call_expr_nargs (exp) == 0)
4769 /* Warning about missing arg was already issued. */
4771 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4773 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4774 error ("invalid argument to %<__builtin_frame_address%>");
4776 error ("invalid argument to %<__builtin_return_address%>");
4782 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4783 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4785 /* Some ports cannot access arbitrary stack frames. */
4788 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4789 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4791 warning (0, "unsupported argument to %<__builtin_return_address%>");
4795 /* For __builtin_frame_address, return what we've got. */
4796 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4800 && ! CONSTANT_P (tem))
4801 tem = copy_to_mode_reg (Pmode, tem);
4806 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4807 we failed and the caller should emit a normal call, otherwise try to get
4808 the result in TARGET, if convenient. */
4811 expand_builtin_alloca (tree exp, rtx target)
4816 /* Emit normal call if marked not-inlineable. */
4817 if (CALL_CANNOT_INLINE_P (exp))
4820 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4823 /* Compute the argument. */
4824 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4826 /* Allocate the desired space. */
4827 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4828 result = convert_memory_address (ptr_mode, result);
4833 /* Expand a call to a bswap builtin with argument ARG0. MODE
4834 is the mode to expand with. */
4837 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4839 enum machine_mode mode;
4843 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4846 arg = CALL_EXPR_ARG (exp, 0);
4847 mode = TYPE_MODE (TREE_TYPE (arg));
4848 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4850 target = expand_unop (mode, bswap_optab, op0, target, 1);
4852 gcc_assert (target);
4854 return convert_to_mode (mode, target, 0);
4857 /* Expand a call to a unary builtin in EXP.
4858 Return NULL_RTX if a normal call should be emitted rather than expanding the
4859 function in-line. If convenient, the result should be placed in TARGET.
4860 SUBTARGET may be used as the target for computing one of EXP's operands. */
4863 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4864 rtx subtarget, optab op_optab)
4868 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4871 /* Compute the argument. */
4872 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4873 VOIDmode, EXPAND_NORMAL);
4874 /* Compute op, into TARGET if possible.
4875 Set TARGET to wherever the result comes back. */
4876 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4877 op_optab, op0, target, 1);
4878 gcc_assert (target);
4880 return convert_to_mode (target_mode, target, 0);
4883 /* Expand a call to __builtin_expect. We just return our argument
4884 as the builtin_expect semantic should've been already executed by
4885 tree branch prediction pass. */
4888 expand_builtin_expect (tree exp, rtx target)
4892 if (call_expr_nargs (exp) < 2)
4894 arg = CALL_EXPR_ARG (exp, 0);
4896 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4897 /* When guessing was done, the hints should be already stripped away. */
4898 gcc_assert (!flag_guess_branch_prob
4899 || optimize == 0 || errorcount || sorrycount);
4904 expand_builtin_trap (void)
4908 emit_insn (gen_trap ());
4911 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4915 /* Expand a call to __builtin_unreachable. We do nothing except emit
4916 a barrier saying that control flow will not pass here.
4918 It is the responsibility of the program being compiled to ensure
4919 that control flow does never reach __builtin_unreachable. */
4921 expand_builtin_unreachable (void)
4926 /* Expand EXP, a call to fabs, fabsf or fabsl.
4927 Return NULL_RTX if a normal call should be emitted rather than expanding
4928 the function inline. If convenient, the result should be placed
4929 in TARGET. SUBTARGET may be used as the target for computing
4933 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4935 enum machine_mode mode;
4939 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4942 arg = CALL_EXPR_ARG (exp, 0);
4943 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4944 mode = TYPE_MODE (TREE_TYPE (arg));
4945 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4946 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4949 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4950 Return NULL is a normal call should be emitted rather than expanding the
4951 function inline. If convenient, the result should be placed in TARGET.
4952 SUBTARGET may be used as the target for computing the operand. */
4955 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4960 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4963 arg = CALL_EXPR_ARG (exp, 0);
4964 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4966 arg = CALL_EXPR_ARG (exp, 1);
4967 op1 = expand_normal (arg);
4969 return expand_copysign (op0, op1, target);
4972 /* Create a new constant string literal and return a char* pointer to it.
4973 The STRING_CST value is the LEN characters at STR. */
4975 build_string_literal (int len, const char *str)
4977 tree t, elem, index, type;
4979 t = build_string (len, str);
4980 elem = build_type_variant (char_type_node, 1, 0);
4981 index = build_index_type (size_int (len - 1));
4982 type = build_array_type (elem, index);
4983 TREE_TYPE (t) = type;
4984 TREE_CONSTANT (t) = 1;
4985 TREE_READONLY (t) = 1;
4986 TREE_STATIC (t) = 1;
4988 type = build_pointer_type (elem);
4989 t = build1 (ADDR_EXPR, type,
4990 build4 (ARRAY_REF, elem,
4991 t, integer_zero_node, NULL_TREE, NULL_TREE));
4995 /* Expand a call to either the entry or exit function profiler. */
4998 expand_builtin_profile_func (bool exitp)
5000 rtx this_rtx, which;
5002 this_rtx = DECL_RTL (current_function_decl);
5003 gcc_assert (MEM_P (this_rtx));
5004 this_rtx = XEXP (this_rtx, 0);
5007 which = profile_function_exit_libfunc;
5009 which = profile_function_entry_libfunc;
5011 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5012 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5019 /* Expand a call to __builtin___clear_cache. */
5022 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5024 #ifndef HAVE_clear_cache
5025 #ifdef CLEAR_INSN_CACHE
5026 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5027 does something. Just do the default expansion to a call to
5031 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5032 does nothing. There is no need to call it. Do nothing. */
5034 #endif /* CLEAR_INSN_CACHE */
5036 /* We have a "clear_cache" insn, and it will handle everything. */
5038 rtx begin_rtx, end_rtx;
5039 enum insn_code icode;
5041 /* We must not expand to a library call. If we did, any
5042 fallback library function in libgcc that might contain a call to
5043 __builtin___clear_cache() would recurse infinitely. */
5044 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5046 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5050 if (HAVE_clear_cache)
5052 icode = CODE_FOR_clear_cache;
5054 begin = CALL_EXPR_ARG (exp, 0);
5055 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5056 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5057 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5058 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5060 end = CALL_EXPR_ARG (exp, 1);
5061 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5062 end_rtx = convert_memory_address (Pmode, end_rtx);
5063 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5064 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5066 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5069 #endif /* HAVE_clear_cache */
5072 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5075 round_trampoline_addr (rtx tramp)
5077 rtx temp, addend, mask;
5079 /* If we don't need too much alignment, we'll have been guaranteed
5080 proper alignment by get_trampoline_type. */
5081 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5084 /* Round address up to desired boundary. */
5085 temp = gen_reg_rtx (Pmode);
5086 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5087 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5089 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5090 temp, 0, OPTAB_LIB_WIDEN);
5091 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5092 temp, 0, OPTAB_LIB_WIDEN);
5098 expand_builtin_init_trampoline (tree exp)
5100 tree t_tramp, t_func, t_chain;
5101 rtx m_tramp, r_tramp, r_chain, tmp;
5103 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5104 POINTER_TYPE, VOID_TYPE))
5107 t_tramp = CALL_EXPR_ARG (exp, 0);
5108 t_func = CALL_EXPR_ARG (exp, 1);
5109 t_chain = CALL_EXPR_ARG (exp, 2);
5111 r_tramp = expand_normal (t_tramp);
5112 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5113 MEM_NOTRAP_P (m_tramp) = 1;
5115 /* The TRAMP argument should be the address of a field within the
5116 local function's FRAME decl. Let's see if we can fill in the
5117 to fill in the MEM_ATTRs for this memory. */
5118 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5119 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
5122 tmp = round_trampoline_addr (r_tramp);
5125 m_tramp = change_address (m_tramp, BLKmode, tmp);
5126 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5127 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5130 /* The FUNC argument should be the address of the nested function.
5131 Extract the actual function decl to pass to the hook. */
5132 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5133 t_func = TREE_OPERAND (t_func, 0);
5134 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5136 r_chain = expand_normal (t_chain);
5138 /* Generate insns to initialize the trampoline. */
5139 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5141 trampolines_created = 1;
5146 expand_builtin_adjust_trampoline (tree exp)
5150 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5153 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5154 tramp = round_trampoline_addr (tramp);
5155 if (targetm.calls.trampoline_adjust_address)
5156 tramp = targetm.calls.trampoline_adjust_address (tramp);
5161 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5162 function. The function first checks whether the back end provides
5163 an insn to implement signbit for the respective mode. If not, it
5164 checks whether the floating point format of the value is such that
5165 the sign bit can be extracted. If that is not the case, the
5166 function returns NULL_RTX to indicate that a normal call should be
5167 emitted rather than expanding the function in-line. EXP is the
5168 expression that is a call to the builtin function; if convenient,
5169 the result should be placed in TARGET. */
5171 expand_builtin_signbit (tree exp, rtx target)
5173 const struct real_format *fmt;
5174 enum machine_mode fmode, imode, rmode;
5175 HOST_WIDE_INT hi, lo;
5178 enum insn_code icode;
5180 location_t loc = EXPR_LOCATION (exp);
5182 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5185 arg = CALL_EXPR_ARG (exp, 0);
5186 fmode = TYPE_MODE (TREE_TYPE (arg));
5187 rmode = TYPE_MODE (TREE_TYPE (exp));
5188 fmt = REAL_MODE_FORMAT (fmode);
5190 arg = builtin_save_expr (arg);
5192 /* Expand the argument yielding a RTX expression. */
5193 temp = expand_normal (arg);
5195 /* Check if the back end provides an insn that handles signbit for the
5197 icode = signbit_optab->handlers [(int) fmode].insn_code;
5198 if (icode != CODE_FOR_nothing)
5200 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5201 emit_unop_insn (icode, target, temp, UNKNOWN);
5205 /* For floating point formats without a sign bit, implement signbit
5207 bitpos = fmt->signbit_ro;
5210 /* But we can't do this if the format supports signed zero. */
5211 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5214 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5215 build_real (TREE_TYPE (arg), dconst0));
5216 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5219 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5221 imode = int_mode_for_mode (fmode);
5222 if (imode == BLKmode)
5224 temp = gen_lowpart (imode, temp);
5229 /* Handle targets with different FP word orders. */
5230 if (FLOAT_WORDS_BIG_ENDIAN)
5231 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5233 word = bitpos / BITS_PER_WORD;
5234 temp = operand_subword_force (temp, word, fmode);
5235 bitpos = bitpos % BITS_PER_WORD;
5238 /* Force the intermediate word_mode (or narrower) result into a
5239 register. This avoids attempting to create paradoxical SUBREGs
5240 of floating point modes below. */
5241 temp = force_reg (imode, temp);
5243 /* If the bitpos is within the "result mode" lowpart, the operation
5244 can be implement with a single bitwise AND. Otherwise, we need
5245 a right shift and an AND. */
5247 if (bitpos < GET_MODE_BITSIZE (rmode))
5249 if (bitpos < HOST_BITS_PER_WIDE_INT)
5252 lo = (HOST_WIDE_INT) 1 << bitpos;
5256 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5260 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5261 temp = gen_lowpart (rmode, temp);
5262 temp = expand_binop (rmode, and_optab, temp,
5263 immed_double_const (lo, hi, rmode),
5264 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5268 /* Perform a logical right shift to place the signbit in the least
5269 significant bit, then truncate the result to the desired mode
5270 and mask just this bit. */
5271 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5272 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5273 temp = gen_lowpart (rmode, temp);
5274 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5275 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5281 /* Expand fork or exec calls. TARGET is the desired target of the
5282 call. EXP is the call. FN is the
5283 identificator of the actual function. IGNORE is nonzero if the
5284 value is to be ignored. */
5287 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5292 /* If we are not profiling, just call the function. */
5293 if (!profile_arc_flag)
5296 /* Otherwise call the wrapper. This should be equivalent for the rest of
5297 compiler, so the code does not diverge, and the wrapper may run the
5298 code necessary for keeping the profiling sane. */
5300 switch (DECL_FUNCTION_CODE (fn))
5303 id = get_identifier ("__gcov_fork");
5306 case BUILT_IN_EXECL:
5307 id = get_identifier ("__gcov_execl");
5310 case BUILT_IN_EXECV:
5311 id = get_identifier ("__gcov_execv");
5314 case BUILT_IN_EXECLP:
5315 id = get_identifier ("__gcov_execlp");
5318 case BUILT_IN_EXECLE:
5319 id = get_identifier ("__gcov_execle");
5322 case BUILT_IN_EXECVP:
5323 id = get_identifier ("__gcov_execvp");
5326 case BUILT_IN_EXECVE:
5327 id = get_identifier ("__gcov_execve");
5334 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5335 FUNCTION_DECL, id, TREE_TYPE (fn));
5336 DECL_EXTERNAL (decl) = 1;
5337 TREE_PUBLIC (decl) = 1;
5338 DECL_ARTIFICIAL (decl) = 1;
5339 TREE_NOTHROW (decl) = 1;
5340 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5341 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5342 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5343 return expand_call (call, target, ignore);
5348 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5349 the pointer in these functions is void*, the tree optimizers may remove
5350 casts. The mode computed in expand_builtin isn't reliable either, due
5351 to __sync_bool_compare_and_swap.
5353 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5354 group of builtins. This gives us log2 of the mode size. */
5356 static inline enum machine_mode
5357 get_builtin_sync_mode (int fcode_diff)
5359 /* The size is not negotiable, so ask not to get BLKmode in return
5360 if the target indicates that a smaller size would be better. */
5361 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5364 /* Expand the memory expression LOC and return the appropriate memory operand
5365 for the builtin_sync operations. */
5368 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5372 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5373 addr = convert_memory_address (Pmode, addr);
5375 /* Note that we explicitly do not want any alias information for this
5376 memory, so that we kill all other live memories. Otherwise we don't
5377 satisfy the full barrier semantics of the intrinsic. */
5378 mem = validize_mem (gen_rtx_MEM (mode, addr));
5380 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5381 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5382 MEM_VOLATILE_P (mem) = 1;
5387 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5388 EXP is the CALL_EXPR. CODE is the rtx code
5389 that corresponds to the arithmetic or logical operation from the name;
5390 an exception here is that NOT actually means NAND. TARGET is an optional
5391 place for us to store the results; AFTER is true if this is the
5392 fetch_and_xxx form. IGNORE is true if we don't actually care about
5393 the result of the operation at all. */
5396 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5397 enum rtx_code code, bool after,
5398 rtx target, bool ignore)
5401 enum machine_mode old_mode;
5402 location_t loc = EXPR_LOCATION (exp);
5404 if (code == NOT && warn_sync_nand)
5406 tree fndecl = get_callee_fndecl (exp);
5407 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5409 static bool warned_f_a_n, warned_n_a_f;
5413 case BUILT_IN_FETCH_AND_NAND_1:
5414 case BUILT_IN_FETCH_AND_NAND_2:
5415 case BUILT_IN_FETCH_AND_NAND_4:
5416 case BUILT_IN_FETCH_AND_NAND_8:
5417 case BUILT_IN_FETCH_AND_NAND_16:
5422 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5423 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5424 warned_f_a_n = true;
5427 case BUILT_IN_NAND_AND_FETCH_1:
5428 case BUILT_IN_NAND_AND_FETCH_2:
5429 case BUILT_IN_NAND_AND_FETCH_4:
5430 case BUILT_IN_NAND_AND_FETCH_8:
5431 case BUILT_IN_NAND_AND_FETCH_16:
5436 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5437 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5438 warned_n_a_f = true;
5446 /* Expand the operands. */
5447 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5449 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5450 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5451 of CONST_INTs, where we know the old_mode only from the call argument. */
5452 old_mode = GET_MODE (val);
5453 if (old_mode == VOIDmode)
5454 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5455 val = convert_modes (mode, old_mode, val, 1);
5458 return expand_sync_operation (mem, val, code);
5460 return expand_sync_fetch_operation (mem, val, code, after, target);
5463 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5464 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5465 true if this is the boolean form. TARGET is a place for us to store the
5466 results; this is NOT optional if IS_BOOL is true. */
5469 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5470 bool is_bool, rtx target)
5472 rtx old_val, new_val, mem;
5473 enum machine_mode old_mode;
5475 /* Expand the operands. */
5476 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5479 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5480 mode, EXPAND_NORMAL);
5481 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5482 of CONST_INTs, where we know the old_mode only from the call argument. */
5483 old_mode = GET_MODE (old_val);
5484 if (old_mode == VOIDmode)
5485 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5486 old_val = convert_modes (mode, old_mode, old_val, 1);
5488 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5489 mode, EXPAND_NORMAL);
5490 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5491 of CONST_INTs, where we know the old_mode only from the call argument. */
5492 old_mode = GET_MODE (new_val);
5493 if (old_mode == VOIDmode)
5494 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5495 new_val = convert_modes (mode, old_mode, new_val, 1);
5498 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5500 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5503 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5504 general form is actually an atomic exchange, and some targets only
5505 support a reduced form with the second argument being a constant 1.
5506 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5510 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5514 enum machine_mode old_mode;
5516 /* Expand the operands. */
5517 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5518 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5519 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5520 of CONST_INTs, where we know the old_mode only from the call argument. */
5521 old_mode = GET_MODE (val);
5522 if (old_mode == VOIDmode)
5523 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5524 val = convert_modes (mode, old_mode, val, 1);
5526 return expand_sync_lock_test_and_set (mem, val, target);
5529 /* Expand the __sync_synchronize intrinsic. */
5532 expand_builtin_synchronize (void)
5535 VEC (tree, gc) *v_clobbers;
5537 #ifdef HAVE_memory_barrier
5538 if (HAVE_memory_barrier)
5540 emit_insn (gen_memory_barrier ());
5545 if (synchronize_libfunc != NULL_RTX)
5547 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5551 /* If no explicit memory barrier instruction is available, create an
5552 empty asm stmt with a memory clobber. */
5553 v_clobbers = VEC_alloc (tree, gc, 1);
5554 VEC_quick_push (tree, v_clobbers,
5555 tree_cons (NULL, build_string (6, "memory"), NULL));
5556 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5557 gimple_asm_set_volatile (x, true);
5558 expand_asm_stmt (x);
5561 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5564 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5566 enum insn_code icode;
5568 rtx val = const0_rtx;
5570 /* Expand the operands. */
5571 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5573 /* If there is an explicit operation in the md file, use it. */
5574 icode = sync_lock_release[mode];
5575 if (icode != CODE_FOR_nothing)
5577 if (!insn_data[icode].operand[1].predicate (val, mode))
5578 val = force_reg (mode, val);
5580 insn = GEN_FCN (icode) (mem, val);
5588 /* Otherwise we can implement this operation by emitting a barrier
5589 followed by a store of zero. */
5590 expand_builtin_synchronize ();
5591 emit_move_insn (mem, val);
5594 /* Expand an expression EXP that calls a built-in function,
5595 with result going to TARGET if that's convenient
5596 (and in mode MODE if that's convenient).
5597 SUBTARGET may be used as the target for computing one of EXP's operands.
5598 IGNORE is nonzero if the value is to be ignored. */
5601 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5604 tree fndecl = get_callee_fndecl (exp);
5605 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5606 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5608 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5609 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5611 /* When not optimizing, generate calls to library functions for a certain
5614 && !called_as_built_in (fndecl)
5615 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5616 && fcode != BUILT_IN_ALLOCA
5617 && fcode != BUILT_IN_FREE)
5618 return expand_call (exp, target, ignore);
5620 /* The built-in function expanders test for target == const0_rtx
5621 to determine whether the function's result will be ignored. */
5623 target = const0_rtx;
5625 /* If the result of a pure or const built-in function is ignored, and
5626 none of its arguments are volatile, we can avoid expanding the
5627 built-in call and just evaluate the arguments for side-effects. */
5628 if (target == const0_rtx
5629 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5631 bool volatilep = false;
5633 call_expr_arg_iterator iter;
5635 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5636 if (TREE_THIS_VOLATILE (arg))
5644 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5645 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5652 CASE_FLT_FN (BUILT_IN_FABS):
5653 target = expand_builtin_fabs (exp, target, subtarget);
5658 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5659 target = expand_builtin_copysign (exp, target, subtarget);
5664 /* Just do a normal library call if we were unable to fold
5666 CASE_FLT_FN (BUILT_IN_CABS):
5669 CASE_FLT_FN (BUILT_IN_EXP):
5670 CASE_FLT_FN (BUILT_IN_EXP10):
5671 CASE_FLT_FN (BUILT_IN_POW10):
5672 CASE_FLT_FN (BUILT_IN_EXP2):
5673 CASE_FLT_FN (BUILT_IN_EXPM1):
5674 CASE_FLT_FN (BUILT_IN_LOGB):
5675 CASE_FLT_FN (BUILT_IN_LOG):
5676 CASE_FLT_FN (BUILT_IN_LOG10):
5677 CASE_FLT_FN (BUILT_IN_LOG2):
5678 CASE_FLT_FN (BUILT_IN_LOG1P):
5679 CASE_FLT_FN (BUILT_IN_TAN):
5680 CASE_FLT_FN (BUILT_IN_ASIN):
5681 CASE_FLT_FN (BUILT_IN_ACOS):
5682 CASE_FLT_FN (BUILT_IN_ATAN):
5683 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5684 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5685 because of possible accuracy problems. */
5686 if (! flag_unsafe_math_optimizations)
5688 CASE_FLT_FN (BUILT_IN_SQRT):
5689 CASE_FLT_FN (BUILT_IN_FLOOR):
5690 CASE_FLT_FN (BUILT_IN_CEIL):
5691 CASE_FLT_FN (BUILT_IN_TRUNC):
5692 CASE_FLT_FN (BUILT_IN_ROUND):
5693 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5694 CASE_FLT_FN (BUILT_IN_RINT):
5695 target = expand_builtin_mathfn (exp, target, subtarget);
5700 CASE_FLT_FN (BUILT_IN_ILOGB):
5701 if (! flag_unsafe_math_optimizations)
5703 CASE_FLT_FN (BUILT_IN_ISINF):
5704 CASE_FLT_FN (BUILT_IN_FINITE):
5705 case BUILT_IN_ISFINITE:
5706 case BUILT_IN_ISNORMAL:
5707 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5712 CASE_FLT_FN (BUILT_IN_LCEIL):
5713 CASE_FLT_FN (BUILT_IN_LLCEIL):
5714 CASE_FLT_FN (BUILT_IN_LFLOOR):
5715 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5716 target = expand_builtin_int_roundingfn (exp, target);
5721 CASE_FLT_FN (BUILT_IN_LRINT):
5722 CASE_FLT_FN (BUILT_IN_LLRINT):
5723 CASE_FLT_FN (BUILT_IN_LROUND):
5724 CASE_FLT_FN (BUILT_IN_LLROUND):
5725 target = expand_builtin_int_roundingfn_2 (exp, target);
5730 CASE_FLT_FN (BUILT_IN_POW):
5731 target = expand_builtin_pow (exp, target, subtarget);
5736 CASE_FLT_FN (BUILT_IN_POWI):
5737 target = expand_builtin_powi (exp, target, subtarget);
5742 CASE_FLT_FN (BUILT_IN_ATAN2):
5743 CASE_FLT_FN (BUILT_IN_LDEXP):
5744 CASE_FLT_FN (BUILT_IN_SCALB):
5745 CASE_FLT_FN (BUILT_IN_SCALBN):
5746 CASE_FLT_FN (BUILT_IN_SCALBLN):
5747 if (! flag_unsafe_math_optimizations)
5750 CASE_FLT_FN (BUILT_IN_FMOD):
5751 CASE_FLT_FN (BUILT_IN_REMAINDER):
5752 CASE_FLT_FN (BUILT_IN_DREM):
5753 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5758 CASE_FLT_FN (BUILT_IN_CEXPI):
5759 target = expand_builtin_cexpi (exp, target, subtarget);
5760 gcc_assert (target);
5763 CASE_FLT_FN (BUILT_IN_SIN):
5764 CASE_FLT_FN (BUILT_IN_COS):
5765 if (! flag_unsafe_math_optimizations)
5767 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5772 CASE_FLT_FN (BUILT_IN_SINCOS):
5773 if (! flag_unsafe_math_optimizations)
5775 target = expand_builtin_sincos (exp);
5780 case BUILT_IN_APPLY_ARGS:
5781 return expand_builtin_apply_args ();
5783 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5784 FUNCTION with a copy of the parameters described by
5785 ARGUMENTS, and ARGSIZE. It returns a block of memory
5786 allocated on the stack into which is stored all the registers
5787 that might possibly be used for returning the result of a
5788 function. ARGUMENTS is the value returned by
5789 __builtin_apply_args. ARGSIZE is the number of bytes of
5790 arguments that must be copied. ??? How should this value be
5791 computed? We'll also need a safe worst case value for varargs
5793 case BUILT_IN_APPLY:
5794 if (!validate_arglist (exp, POINTER_TYPE,
5795 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5796 && !validate_arglist (exp, REFERENCE_TYPE,
5797 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5803 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5804 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5805 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5807 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5810 /* __builtin_return (RESULT) causes the function to return the
5811 value described by RESULT. RESULT is address of the block of
5812 memory returned by __builtin_apply. */
5813 case BUILT_IN_RETURN:
5814 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5815 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5818 case BUILT_IN_SAVEREGS:
5819 return expand_builtin_saveregs ();
5821 case BUILT_IN_ARGS_INFO:
5822 return expand_builtin_args_info (exp);
5824 case BUILT_IN_VA_ARG_PACK:
5825 /* All valid uses of __builtin_va_arg_pack () are removed during
5827 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5830 case BUILT_IN_VA_ARG_PACK_LEN:
5831 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5833 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5836 /* Return the address of the first anonymous stack arg. */
5837 case BUILT_IN_NEXT_ARG:
5838 if (fold_builtin_next_arg (exp, false))
5840 return expand_builtin_next_arg ();
5842 case BUILT_IN_CLEAR_CACHE:
5843 target = expand_builtin___clear_cache (exp);
5848 case BUILT_IN_CLASSIFY_TYPE:
5849 return expand_builtin_classify_type (exp);
5851 case BUILT_IN_CONSTANT_P:
5854 case BUILT_IN_FRAME_ADDRESS:
5855 case BUILT_IN_RETURN_ADDRESS:
5856 return expand_builtin_frame_address (fndecl, exp);
5858 /* Returns the address of the area where the structure is returned.
5860 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5861 if (call_expr_nargs (exp) != 0
5862 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5863 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5866 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5868 case BUILT_IN_ALLOCA:
5869 target = expand_builtin_alloca (exp, target);
5874 case BUILT_IN_STACK_SAVE:
5875 return expand_stack_save ();
5877 case BUILT_IN_STACK_RESTORE:
5878 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5881 case BUILT_IN_BSWAP32:
5882 case BUILT_IN_BSWAP64:
5883 target = expand_builtin_bswap (exp, target, subtarget);
5889 CASE_INT_FN (BUILT_IN_FFS):
5890 case BUILT_IN_FFSIMAX:
5891 target = expand_builtin_unop (target_mode, exp, target,
5892 subtarget, ffs_optab);
5897 CASE_INT_FN (BUILT_IN_CLZ):
5898 case BUILT_IN_CLZIMAX:
5899 target = expand_builtin_unop (target_mode, exp, target,
5900 subtarget, clz_optab);
5905 CASE_INT_FN (BUILT_IN_CTZ):
5906 case BUILT_IN_CTZIMAX:
5907 target = expand_builtin_unop (target_mode, exp, target,
5908 subtarget, ctz_optab);
5913 CASE_INT_FN (BUILT_IN_POPCOUNT):
5914 case BUILT_IN_POPCOUNTIMAX:
5915 target = expand_builtin_unop (target_mode, exp, target,
5916 subtarget, popcount_optab);
5921 CASE_INT_FN (BUILT_IN_PARITY):
5922 case BUILT_IN_PARITYIMAX:
5923 target = expand_builtin_unop (target_mode, exp, target,
5924 subtarget, parity_optab);
5929 case BUILT_IN_STRLEN:
5930 target = expand_builtin_strlen (exp, target, target_mode);
5935 case BUILT_IN_STRCPY:
5936 target = expand_builtin_strcpy (exp, target);
5941 case BUILT_IN_STRNCPY:
5942 target = expand_builtin_strncpy (exp, target);
5947 case BUILT_IN_STPCPY:
5948 target = expand_builtin_stpcpy (exp, target, mode);
5953 case BUILT_IN_MEMCPY:
5954 target = expand_builtin_memcpy (exp, target);
5959 case BUILT_IN_MEMPCPY:
5960 target = expand_builtin_mempcpy (exp, target, mode);
5965 case BUILT_IN_MEMSET:
5966 target = expand_builtin_memset (exp, target, mode);
5971 case BUILT_IN_BZERO:
5972 target = expand_builtin_bzero (exp);
5977 case BUILT_IN_STRCMP:
5978 target = expand_builtin_strcmp (exp, target);
5983 case BUILT_IN_STRNCMP:
5984 target = expand_builtin_strncmp (exp, target, mode);
5990 case BUILT_IN_MEMCMP:
5991 target = expand_builtin_memcmp (exp, target, mode);
5996 case BUILT_IN_SETJMP:
5997 /* This should have been lowered to the builtins below. */
6000 case BUILT_IN_SETJMP_SETUP:
6001 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6002 and the receiver label. */
6003 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6005 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6006 VOIDmode, EXPAND_NORMAL);
6007 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6008 rtx label_r = label_rtx (label);
6010 /* This is copied from the handling of non-local gotos. */
6011 expand_builtin_setjmp_setup (buf_addr, label_r);
6012 nonlocal_goto_handler_labels
6013 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6014 nonlocal_goto_handler_labels);
6015 /* ??? Do not let expand_label treat us as such since we would
6016 not want to be both on the list of non-local labels and on
6017 the list of forced labels. */
6018 FORCED_LABEL (label) = 0;
6023 case BUILT_IN_SETJMP_DISPATCHER:
6024 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6025 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6027 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6028 rtx label_r = label_rtx (label);
6030 /* Remove the dispatcher label from the list of non-local labels
6031 since the receiver labels have been added to it above. */
6032 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6037 case BUILT_IN_SETJMP_RECEIVER:
6038 /* __builtin_setjmp_receiver is passed the receiver label. */
6039 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6041 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6042 rtx label_r = label_rtx (label);
6044 expand_builtin_setjmp_receiver (label_r);
6049 /* __builtin_longjmp is passed a pointer to an array of five words.
6050 It's similar to the C library longjmp function but works with
6051 __builtin_setjmp above. */
6052 case BUILT_IN_LONGJMP:
6053 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6055 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6056 VOIDmode, EXPAND_NORMAL);
6057 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6059 if (value != const1_rtx)
6061 error ("%<__builtin_longjmp%> second argument must be 1");
6065 expand_builtin_longjmp (buf_addr, value);
6070 case BUILT_IN_NONLOCAL_GOTO:
6071 target = expand_builtin_nonlocal_goto (exp);
6076 /* This updates the setjmp buffer that is its argument with the value
6077 of the current stack pointer. */
6078 case BUILT_IN_UPDATE_SETJMP_BUF:
6079 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6082 = expand_normal (CALL_EXPR_ARG (exp, 0));
6084 expand_builtin_update_setjmp_buf (buf_addr);
6090 expand_builtin_trap ();
6093 case BUILT_IN_UNREACHABLE:
6094 expand_builtin_unreachable ();
6097 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6098 case BUILT_IN_SIGNBITD32:
6099 case BUILT_IN_SIGNBITD64:
6100 case BUILT_IN_SIGNBITD128:
6101 target = expand_builtin_signbit (exp, target);
6106 /* Various hooks for the DWARF 2 __throw routine. */
6107 case BUILT_IN_UNWIND_INIT:
6108 expand_builtin_unwind_init ();
6110 case BUILT_IN_DWARF_CFA:
6111 return virtual_cfa_rtx;
6112 #ifdef DWARF2_UNWIND_INFO
6113 case BUILT_IN_DWARF_SP_COLUMN:
6114 return expand_builtin_dwarf_sp_column ();
6115 case BUILT_IN_INIT_DWARF_REG_SIZES:
6116 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6119 case BUILT_IN_FROB_RETURN_ADDR:
6120 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6121 case BUILT_IN_EXTRACT_RETURN_ADDR:
6122 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6123 case BUILT_IN_EH_RETURN:
6124 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6125 CALL_EXPR_ARG (exp, 1));
6127 #ifdef EH_RETURN_DATA_REGNO
6128 case BUILT_IN_EH_RETURN_DATA_REGNO:
6129 return expand_builtin_eh_return_data_regno (exp);
6131 case BUILT_IN_EXTEND_POINTER:
6132 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6133 case BUILT_IN_EH_POINTER:
6134 return expand_builtin_eh_pointer (exp);
6135 case BUILT_IN_EH_FILTER:
6136 return expand_builtin_eh_filter (exp);
6137 case BUILT_IN_EH_COPY_VALUES:
6138 return expand_builtin_eh_copy_values (exp);
6140 case BUILT_IN_VA_START:
6141 return expand_builtin_va_start (exp);
6142 case BUILT_IN_VA_END:
6143 return expand_builtin_va_end (exp);
6144 case BUILT_IN_VA_COPY:
6145 return expand_builtin_va_copy (exp);
6146 case BUILT_IN_EXPECT:
6147 return expand_builtin_expect (exp, target);
6148 case BUILT_IN_PREFETCH:
6149 expand_builtin_prefetch (exp);
6152 case BUILT_IN_PROFILE_FUNC_ENTER:
6153 return expand_builtin_profile_func (false);
6154 case BUILT_IN_PROFILE_FUNC_EXIT:
6155 return expand_builtin_profile_func (true);
6157 case BUILT_IN_INIT_TRAMPOLINE:
6158 return expand_builtin_init_trampoline (exp);
6159 case BUILT_IN_ADJUST_TRAMPOLINE:
6160 return expand_builtin_adjust_trampoline (exp);
6163 case BUILT_IN_EXECL:
6164 case BUILT_IN_EXECV:
6165 case BUILT_IN_EXECLP:
6166 case BUILT_IN_EXECLE:
6167 case BUILT_IN_EXECVP:
6168 case BUILT_IN_EXECVE:
6169 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6174 case BUILT_IN_FETCH_AND_ADD_1:
6175 case BUILT_IN_FETCH_AND_ADD_2:
6176 case BUILT_IN_FETCH_AND_ADD_4:
6177 case BUILT_IN_FETCH_AND_ADD_8:
6178 case BUILT_IN_FETCH_AND_ADD_16:
6179 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6180 target = expand_builtin_sync_operation (mode, exp, PLUS,
6181 false, target, ignore);
6186 case BUILT_IN_FETCH_AND_SUB_1:
6187 case BUILT_IN_FETCH_AND_SUB_2:
6188 case BUILT_IN_FETCH_AND_SUB_4:
6189 case BUILT_IN_FETCH_AND_SUB_8:
6190 case BUILT_IN_FETCH_AND_SUB_16:
6191 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6192 target = expand_builtin_sync_operation (mode, exp, MINUS,
6193 false, target, ignore);
6198 case BUILT_IN_FETCH_AND_OR_1:
6199 case BUILT_IN_FETCH_AND_OR_2:
6200 case BUILT_IN_FETCH_AND_OR_4:
6201 case BUILT_IN_FETCH_AND_OR_8:
6202 case BUILT_IN_FETCH_AND_OR_16:
6203 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6204 target = expand_builtin_sync_operation (mode, exp, IOR,
6205 false, target, ignore);
6210 case BUILT_IN_FETCH_AND_AND_1:
6211 case BUILT_IN_FETCH_AND_AND_2:
6212 case BUILT_IN_FETCH_AND_AND_4:
6213 case BUILT_IN_FETCH_AND_AND_8:
6214 case BUILT_IN_FETCH_AND_AND_16:
6215 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6216 target = expand_builtin_sync_operation (mode, exp, AND,
6217 false, target, ignore);
6222 case BUILT_IN_FETCH_AND_XOR_1:
6223 case BUILT_IN_FETCH_AND_XOR_2:
6224 case BUILT_IN_FETCH_AND_XOR_4:
6225 case BUILT_IN_FETCH_AND_XOR_8:
6226 case BUILT_IN_FETCH_AND_XOR_16:
6227 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6228 target = expand_builtin_sync_operation (mode, exp, XOR,
6229 false, target, ignore);
6234 case BUILT_IN_FETCH_AND_NAND_1:
6235 case BUILT_IN_FETCH_AND_NAND_2:
6236 case BUILT_IN_FETCH_AND_NAND_4:
6237 case BUILT_IN_FETCH_AND_NAND_8:
6238 case BUILT_IN_FETCH_AND_NAND_16:
6239 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6240 target = expand_builtin_sync_operation (mode, exp, NOT,
6241 false, target, ignore);
6246 case BUILT_IN_ADD_AND_FETCH_1:
6247 case BUILT_IN_ADD_AND_FETCH_2:
6248 case BUILT_IN_ADD_AND_FETCH_4:
6249 case BUILT_IN_ADD_AND_FETCH_8:
6250 case BUILT_IN_ADD_AND_FETCH_16:
6251 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6252 target = expand_builtin_sync_operation (mode, exp, PLUS,
6253 true, target, ignore);
6258 case BUILT_IN_SUB_AND_FETCH_1:
6259 case BUILT_IN_SUB_AND_FETCH_2:
6260 case BUILT_IN_SUB_AND_FETCH_4:
6261 case BUILT_IN_SUB_AND_FETCH_8:
6262 case BUILT_IN_SUB_AND_FETCH_16:
6263 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6264 target = expand_builtin_sync_operation (mode, exp, MINUS,
6265 true, target, ignore);
6270 case BUILT_IN_OR_AND_FETCH_1:
6271 case BUILT_IN_OR_AND_FETCH_2:
6272 case BUILT_IN_OR_AND_FETCH_4:
6273 case BUILT_IN_OR_AND_FETCH_8:
6274 case BUILT_IN_OR_AND_FETCH_16:
6275 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6276 target = expand_builtin_sync_operation (mode, exp, IOR,
6277 true, target, ignore);
6282 case BUILT_IN_AND_AND_FETCH_1:
6283 case BUILT_IN_AND_AND_FETCH_2:
6284 case BUILT_IN_AND_AND_FETCH_4:
6285 case BUILT_IN_AND_AND_FETCH_8:
6286 case BUILT_IN_AND_AND_FETCH_16:
6287 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6288 target = expand_builtin_sync_operation (mode, exp, AND,
6289 true, target, ignore);
6294 case BUILT_IN_XOR_AND_FETCH_1:
6295 case BUILT_IN_XOR_AND_FETCH_2:
6296 case BUILT_IN_XOR_AND_FETCH_4:
6297 case BUILT_IN_XOR_AND_FETCH_8:
6298 case BUILT_IN_XOR_AND_FETCH_16:
6299 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6300 target = expand_builtin_sync_operation (mode, exp, XOR,
6301 true, target, ignore);
6306 case BUILT_IN_NAND_AND_FETCH_1:
6307 case BUILT_IN_NAND_AND_FETCH_2:
6308 case BUILT_IN_NAND_AND_FETCH_4:
6309 case BUILT_IN_NAND_AND_FETCH_8:
6310 case BUILT_IN_NAND_AND_FETCH_16:
6311 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6312 target = expand_builtin_sync_operation (mode, exp, NOT,
6313 true, target, ignore);
6318 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6319 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6320 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6321 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6322 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6323 if (mode == VOIDmode)
6324 mode = TYPE_MODE (boolean_type_node);
6325 if (!target || !register_operand (target, mode))
6326 target = gen_reg_rtx (mode);
6328 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6329 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6334 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6335 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6336 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6337 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6338 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6339 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6340 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6345 case BUILT_IN_LOCK_TEST_AND_SET_1:
6346 case BUILT_IN_LOCK_TEST_AND_SET_2:
6347 case BUILT_IN_LOCK_TEST_AND_SET_4:
6348 case BUILT_IN_LOCK_TEST_AND_SET_8:
6349 case BUILT_IN_LOCK_TEST_AND_SET_16:
6350 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6351 target = expand_builtin_lock_test_and_set (mode, exp, target);
6356 case BUILT_IN_LOCK_RELEASE_1:
6357 case BUILT_IN_LOCK_RELEASE_2:
6358 case BUILT_IN_LOCK_RELEASE_4:
6359 case BUILT_IN_LOCK_RELEASE_8:
6360 case BUILT_IN_LOCK_RELEASE_16:
6361 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6362 expand_builtin_lock_release (mode, exp);
6365 case BUILT_IN_SYNCHRONIZE:
6366 expand_builtin_synchronize ();
6369 case BUILT_IN_OBJECT_SIZE:
6370 return expand_builtin_object_size (exp);
6372 case BUILT_IN_MEMCPY_CHK:
6373 case BUILT_IN_MEMPCPY_CHK:
6374 case BUILT_IN_MEMMOVE_CHK:
6375 case BUILT_IN_MEMSET_CHK:
6376 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6381 case BUILT_IN_STRCPY_CHK:
6382 case BUILT_IN_STPCPY_CHK:
6383 case BUILT_IN_STRNCPY_CHK:
6384 case BUILT_IN_STRCAT_CHK:
6385 case BUILT_IN_STRNCAT_CHK:
6386 case BUILT_IN_SNPRINTF_CHK:
6387 case BUILT_IN_VSNPRINTF_CHK:
6388 maybe_emit_chk_warning (exp, fcode);
6391 case BUILT_IN_SPRINTF_CHK:
6392 case BUILT_IN_VSPRINTF_CHK:
6393 maybe_emit_sprintf_chk_warning (exp, fcode);
6397 maybe_emit_free_warning (exp);
6400 default: /* just do library call, if unknown builtin */
6404 /* The switch statement above can drop through to cause the function
6405 to be called normally. */
6406 return expand_call (exp, target, ignore);
6409 /* Determine whether a tree node represents a call to a built-in
6410 function. If the tree T is a call to a built-in function with
6411 the right number of arguments of the appropriate types, return
6412 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6413 Otherwise the return value is END_BUILTINS. */
6415 enum built_in_function
6416 builtin_mathfn_code (const_tree t)
6418 const_tree fndecl, arg, parmlist;
6419 const_tree argtype, parmtype;
6420 const_call_expr_arg_iterator iter;
6422 if (TREE_CODE (t) != CALL_EXPR
6423 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6424 return END_BUILTINS;
6426 fndecl = get_callee_fndecl (t);
6427 if (fndecl == NULL_TREE
6428 || TREE_CODE (fndecl) != FUNCTION_DECL
6429 || ! DECL_BUILT_IN (fndecl)
6430 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6431 return END_BUILTINS;
6433 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6434 init_const_call_expr_arg_iterator (t, &iter);
6435 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6437 /* If a function doesn't take a variable number of arguments,
6438 the last element in the list will have type `void'. */
6439 parmtype = TREE_VALUE (parmlist);
6440 if (VOID_TYPE_P (parmtype))
6442 if (more_const_call_expr_args_p (&iter))
6443 return END_BUILTINS;
6444 return DECL_FUNCTION_CODE (fndecl);
6447 if (! more_const_call_expr_args_p (&iter))
6448 return END_BUILTINS;
6450 arg = next_const_call_expr_arg (&iter);
6451 argtype = TREE_TYPE (arg);
6453 if (SCALAR_FLOAT_TYPE_P (parmtype))
6455 if (! SCALAR_FLOAT_TYPE_P (argtype))
6456 return END_BUILTINS;
6458 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6460 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6461 return END_BUILTINS;
6463 else if (POINTER_TYPE_P (parmtype))
6465 if (! POINTER_TYPE_P (argtype))
6466 return END_BUILTINS;
6468 else if (INTEGRAL_TYPE_P (parmtype))
6470 if (! INTEGRAL_TYPE_P (argtype))
6471 return END_BUILTINS;
6474 return END_BUILTINS;
6477 /* Variable-length argument list. */
6478 return DECL_FUNCTION_CODE (fndecl);
6481 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6482 evaluate to a constant. */
6485 fold_builtin_constant_p (tree arg)
6487 /* We return 1 for a numeric type that's known to be a constant
6488 value at compile-time or for an aggregate type that's a
6489 literal constant. */
6492 /* If we know this is a constant, emit the constant of one. */
6493 if (CONSTANT_CLASS_P (arg)
6494 || (TREE_CODE (arg) == CONSTRUCTOR
6495 && TREE_CONSTANT (arg)))
6496 return integer_one_node;
6497 if (TREE_CODE (arg) == ADDR_EXPR)
6499 tree op = TREE_OPERAND (arg, 0);
6500 if (TREE_CODE (op) == STRING_CST
6501 || (TREE_CODE (op) == ARRAY_REF
6502 && integer_zerop (TREE_OPERAND (op, 1))
6503 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6504 return integer_one_node;
6507 /* If this expression has side effects, show we don't know it to be a
6508 constant. Likewise if it's a pointer or aggregate type since in
6509 those case we only want literals, since those are only optimized
6510 when generating RTL, not later.
6511 And finally, if we are compiling an initializer, not code, we
6512 need to return a definite result now; there's not going to be any
6513 more optimization done. */
6514 if (TREE_SIDE_EFFECTS (arg)
6515 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6516 || POINTER_TYPE_P (TREE_TYPE (arg))
6518 || folding_initializer)
6519 return integer_zero_node;
6524 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6525 return it as a truthvalue. */
6528 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6530 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6532 fn = built_in_decls[BUILT_IN_EXPECT];
6533 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6534 ret_type = TREE_TYPE (TREE_TYPE (fn));
6535 pred_type = TREE_VALUE (arg_types);
6536 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6538 pred = fold_convert_loc (loc, pred_type, pred);
6539 expected = fold_convert_loc (loc, expected_type, expected);
6540 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6542 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6543 build_int_cst (ret_type, 0));
6546 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6547 NULL_TREE if no simplification is possible. */
6550 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6553 enum tree_code code;
6555 /* If this is a builtin_expect within a builtin_expect keep the
6556 inner one. See through a comparison against a constant. It
6557 might have been added to create a thruthvalue. */
6559 if (COMPARISON_CLASS_P (inner)
6560 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6561 inner = TREE_OPERAND (inner, 0);
6563 if (TREE_CODE (inner) == CALL_EXPR
6564 && (fndecl = get_callee_fndecl (inner))
6565 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6566 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6569 /* Distribute the expected value over short-circuiting operators.
6570 See through the cast from truthvalue_type_node to long. */
6572 while (TREE_CODE (inner) == NOP_EXPR
6573 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6574 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6575 inner = TREE_OPERAND (inner, 0);
6577 code = TREE_CODE (inner);
6578 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6580 tree op0 = TREE_OPERAND (inner, 0);
6581 tree op1 = TREE_OPERAND (inner, 1);
6583 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6584 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6585 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6587 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6590 /* If the argument isn't invariant then there's nothing else we can do. */
6591 if (!TREE_CONSTANT (arg0))
6594 /* If we expect that a comparison against the argument will fold to
6595 a constant return the constant. In practice, this means a true
6596 constant or the address of a non-weak symbol. */
6599 if (TREE_CODE (inner) == ADDR_EXPR)
6603 inner = TREE_OPERAND (inner, 0);
6605 while (TREE_CODE (inner) == COMPONENT_REF
6606 || TREE_CODE (inner) == ARRAY_REF);
6607 if ((TREE_CODE (inner) == VAR_DECL
6608 || TREE_CODE (inner) == FUNCTION_DECL)
6609 && DECL_WEAK (inner))
6613 /* Otherwise, ARG0 already has the proper type for the return value. */
6617 /* Fold a call to __builtin_classify_type with argument ARG. */
6620 fold_builtin_classify_type (tree arg)
6623 return build_int_cst (NULL_TREE, no_type_class);
6625 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6628 /* Fold a call to __builtin_strlen with argument ARG. */
6631 fold_builtin_strlen (location_t loc, tree type, tree arg)
6633 if (!validate_arg (arg, POINTER_TYPE))
6637 tree len = c_strlen (arg, 0);
6640 return fold_convert_loc (loc, type, len);
6646 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6649 fold_builtin_inf (location_t loc, tree type, int warn)
6651 REAL_VALUE_TYPE real;
6653 /* __builtin_inff is intended to be usable to define INFINITY on all
6654 targets. If an infinity is not available, INFINITY expands "to a
6655 positive constant of type float that overflows at translation
6656 time", footnote "In this case, using INFINITY will violate the
6657 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6658 Thus we pedwarn to ensure this constraint violation is
6660 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6661 pedwarn (loc, 0, "target format does not support infinity");
6664 return build_real (type, real);
6667 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6670 fold_builtin_nan (tree arg, tree type, int quiet)
6672 REAL_VALUE_TYPE real;
6675 if (!validate_arg (arg, POINTER_TYPE))
6677 str = c_getstr (arg);
6681 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6684 return build_real (type, real);
6687 /* Return true if the floating point expression T has an integer value.
6688 We also allow +Inf, -Inf and NaN to be considered integer values. */
6691 integer_valued_real_p (tree t)
6693 switch (TREE_CODE (t))
6700 return integer_valued_real_p (TREE_OPERAND (t, 0));
6705 return integer_valued_real_p (TREE_OPERAND (t, 1));
6712 return integer_valued_real_p (TREE_OPERAND (t, 0))
6713 && integer_valued_real_p (TREE_OPERAND (t, 1));
6716 return integer_valued_real_p (TREE_OPERAND (t, 1))
6717 && integer_valued_real_p (TREE_OPERAND (t, 2));
6720 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6724 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6725 if (TREE_CODE (type) == INTEGER_TYPE)
6727 if (TREE_CODE (type) == REAL_TYPE)
6728 return integer_valued_real_p (TREE_OPERAND (t, 0));
6733 switch (builtin_mathfn_code (t))
6735 CASE_FLT_FN (BUILT_IN_CEIL):
6736 CASE_FLT_FN (BUILT_IN_FLOOR):
6737 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6738 CASE_FLT_FN (BUILT_IN_RINT):
6739 CASE_FLT_FN (BUILT_IN_ROUND):
6740 CASE_FLT_FN (BUILT_IN_TRUNC):
6743 CASE_FLT_FN (BUILT_IN_FMIN):
6744 CASE_FLT_FN (BUILT_IN_FMAX):
6745 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6746 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6759 /* FNDECL is assumed to be a builtin where truncation can be propagated
6760 across (for instance floor((double)f) == (double)floorf (f).
6761 Do the transformation for a call with argument ARG. */
6764 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6766 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6768 if (!validate_arg (arg, REAL_TYPE))
6771 /* Integer rounding functions are idempotent. */
6772 if (fcode == builtin_mathfn_code (arg))
6775 /* If argument is already integer valued, and we don't need to worry
6776 about setting errno, there's no need to perform rounding. */
6777 if (! flag_errno_math && integer_valued_real_p (arg))
6782 tree arg0 = strip_float_extensions (arg);
6783 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6784 tree newtype = TREE_TYPE (arg0);
6787 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6788 && (decl = mathfn_built_in (newtype, fcode)))
6789 return fold_convert_loc (loc, ftype,
6790 build_call_expr_loc (loc, decl, 1,
6791 fold_convert_loc (loc,
6798 /* FNDECL is assumed to be builtin which can narrow the FP type of
6799 the argument, for instance lround((double)f) -> lroundf (f).
6800 Do the transformation for a call with argument ARG. */
6803 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6805 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6807 if (!validate_arg (arg, REAL_TYPE))
6810 /* If argument is already integer valued, and we don't need to worry
6811 about setting errno, there's no need to perform rounding. */
6812 if (! flag_errno_math && integer_valued_real_p (arg))
6813 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6814 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6818 tree ftype = TREE_TYPE (arg);
6819 tree arg0 = strip_float_extensions (arg);
6820 tree newtype = TREE_TYPE (arg0);
6823 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6824 && (decl = mathfn_built_in (newtype, fcode)))
6825 return build_call_expr_loc (loc, decl, 1,
6826 fold_convert_loc (loc, newtype, arg0));
6829 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6830 sizeof (long long) == sizeof (long). */
6831 if (TYPE_PRECISION (long_long_integer_type_node)
6832 == TYPE_PRECISION (long_integer_type_node))
6834 tree newfn = NULL_TREE;
6837 CASE_FLT_FN (BUILT_IN_LLCEIL):
6838 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6841 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6842 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6845 CASE_FLT_FN (BUILT_IN_LLROUND):
6846 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6849 CASE_FLT_FN (BUILT_IN_LLRINT):
6850 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6859 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6860 return fold_convert_loc (loc,
6861 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6868 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6869 return type. Return NULL_TREE if no simplification can be made. */
6872 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6876 if (!validate_arg (arg, COMPLEX_TYPE)
6877 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6880 /* Calculate the result when the argument is a constant. */
6881 if (TREE_CODE (arg) == COMPLEX_CST
6882 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6886 if (TREE_CODE (arg) == COMPLEX_EXPR)
6888 tree real = TREE_OPERAND (arg, 0);
6889 tree imag = TREE_OPERAND (arg, 1);
6891 /* If either part is zero, cabs is fabs of the other. */
6892 if (real_zerop (real))
6893 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6894 if (real_zerop (imag))
6895 return fold_build1_loc (loc, ABS_EXPR, type, real);
6897 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6898 if (flag_unsafe_math_optimizations
6899 && operand_equal_p (real, imag, OEP_PURE_SAME))
6901 const REAL_VALUE_TYPE sqrt2_trunc
6902 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6904 return fold_build2_loc (loc, MULT_EXPR, type,
6905 fold_build1_loc (loc, ABS_EXPR, type, real),
6906 build_real (type, sqrt2_trunc));
6910 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
6911 if (TREE_CODE (arg) == NEGATE_EXPR
6912 || TREE_CODE (arg) == CONJ_EXPR)
6913 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6915 /* Don't do this when optimizing for size. */
6916 if (flag_unsafe_math_optimizations
6917 && optimize && optimize_function_for_speed_p (cfun))
6919 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6921 if (sqrtfn != NULL_TREE)
6923 tree rpart, ipart, result;
6925 arg = builtin_save_expr (arg);
6927 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6928 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6930 rpart = builtin_save_expr (rpart);
6931 ipart = builtin_save_expr (ipart);
6933 result = fold_build2_loc (loc, PLUS_EXPR, type,
6934 fold_build2_loc (loc, MULT_EXPR, type,
6936 fold_build2_loc (loc, MULT_EXPR, type,
6939 return build_call_expr_loc (loc, sqrtfn, 1, result);
6946 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
6947 Return NULL_TREE if no simplification can be made. */
6950 fold_builtin_sqrt (location_t loc, tree arg, tree type)
6953 enum built_in_function fcode;
6956 if (!validate_arg (arg, REAL_TYPE))
6959 /* Calculate the result when the argument is a constant. */
6960 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
6963 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
6964 fcode = builtin_mathfn_code (arg);
6965 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6967 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6968 arg = fold_build2_loc (loc, MULT_EXPR, type,
6969 CALL_EXPR_ARG (arg, 0),
6970 build_real (type, dconsthalf));
6971 return build_call_expr_loc (loc, expfn, 1, arg);
6974 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
6975 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6977 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6981 tree arg0 = CALL_EXPR_ARG (arg, 0);
6983 /* The inner root was either sqrt or cbrt. */
6984 /* This was a conditional expression but it triggered a bug
6986 REAL_VALUE_TYPE dconstroot;
6987 if (BUILTIN_SQRT_P (fcode))
6988 dconstroot = dconsthalf;
6990 dconstroot = dconst_third ();
6992 /* Adjust for the outer root. */
6993 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6994 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6995 tree_root = build_real (type, dconstroot);
6996 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7000 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7001 if (flag_unsafe_math_optimizations
7002 && (fcode == BUILT_IN_POW
7003 || fcode == BUILT_IN_POWF
7004 || fcode == BUILT_IN_POWL))
7006 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7007 tree arg0 = CALL_EXPR_ARG (arg, 0);
7008 tree arg1 = CALL_EXPR_ARG (arg, 1);
7010 if (!tree_expr_nonnegative_p (arg0))
7011 arg0 = build1 (ABS_EXPR, type, arg0);
7012 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7013 build_real (type, dconsthalf));
7014 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7020 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7021 Return NULL_TREE if no simplification can be made. */
7024 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7026 const enum built_in_function fcode = builtin_mathfn_code (arg);
7029 if (!validate_arg (arg, REAL_TYPE))
7032 /* Calculate the result when the argument is a constant. */
7033 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7036 if (flag_unsafe_math_optimizations)
7038 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7039 if (BUILTIN_EXPONENT_P (fcode))
7041 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7042 const REAL_VALUE_TYPE third_trunc =
7043 real_value_truncate (TYPE_MODE (type), dconst_third ());
7044 arg = fold_build2_loc (loc, MULT_EXPR, type,
7045 CALL_EXPR_ARG (arg, 0),
7046 build_real (type, third_trunc));
7047 return build_call_expr_loc (loc, expfn, 1, arg);
7050 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7051 if (BUILTIN_SQRT_P (fcode))
7053 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7057 tree arg0 = CALL_EXPR_ARG (arg, 0);
7059 REAL_VALUE_TYPE dconstroot = dconst_third ();
7061 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7062 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7063 tree_root = build_real (type, dconstroot);
7064 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7068 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7069 if (BUILTIN_CBRT_P (fcode))
7071 tree arg0 = CALL_EXPR_ARG (arg, 0);
7072 if (tree_expr_nonnegative_p (arg0))
7074 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7079 REAL_VALUE_TYPE dconstroot;
7081 real_arithmetic (&dconstroot, MULT_EXPR,
7082 dconst_third_ptr (), dconst_third_ptr ());
7083 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7084 tree_root = build_real (type, dconstroot);
7085 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7090 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7091 if (fcode == BUILT_IN_POW
7092 || fcode == BUILT_IN_POWF
7093 || fcode == BUILT_IN_POWL)
7095 tree arg00 = CALL_EXPR_ARG (arg, 0);
7096 tree arg01 = CALL_EXPR_ARG (arg, 1);
7097 if (tree_expr_nonnegative_p (arg00))
7099 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7100 const REAL_VALUE_TYPE dconstroot
7101 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7102 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7103 build_real (type, dconstroot));
7104 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7111 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7112 TYPE is the type of the return value. Return NULL_TREE if no
7113 simplification can be made. */
7116 fold_builtin_cos (location_t loc,
7117 tree arg, tree type, tree fndecl)
7121 if (!validate_arg (arg, REAL_TYPE))
7124 /* Calculate the result when the argument is a constant. */
7125 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7128 /* Optimize cos(-x) into cos (x). */
7129 if ((narg = fold_strip_sign_ops (arg)))
7130 return build_call_expr_loc (loc, fndecl, 1, narg);
7135 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7136 Return NULL_TREE if no simplification can be made. */
7139 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7141 if (validate_arg (arg, REAL_TYPE))
7145 /* Calculate the result when the argument is a constant. */
7146 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7149 /* Optimize cosh(-x) into cosh (x). */
7150 if ((narg = fold_strip_sign_ops (arg)))
7151 return build_call_expr_loc (loc, fndecl, 1, narg);
7157 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7158 argument ARG. TYPE is the type of the return value. Return
7159 NULL_TREE if no simplification can be made. */
7162 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7165 if (validate_arg (arg, COMPLEX_TYPE)
7166 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7170 /* Calculate the result when the argument is a constant. */
7171 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7174 /* Optimize fn(-x) into fn(x). */
7175 if ((tmp = fold_strip_sign_ops (arg)))
7176 return build_call_expr_loc (loc, fndecl, 1, tmp);
7182 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7183 Return NULL_TREE if no simplification can be made. */
7186 fold_builtin_tan (tree arg, tree type)
7188 enum built_in_function fcode;
7191 if (!validate_arg (arg, REAL_TYPE))
7194 /* Calculate the result when the argument is a constant. */
7195 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7198 /* Optimize tan(atan(x)) = x. */
7199 fcode = builtin_mathfn_code (arg);
7200 if (flag_unsafe_math_optimizations
7201 && (fcode == BUILT_IN_ATAN
7202 || fcode == BUILT_IN_ATANF
7203 || fcode == BUILT_IN_ATANL))
7204 return CALL_EXPR_ARG (arg, 0);
7209 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7210 NULL_TREE if no simplification can be made. */
7213 fold_builtin_sincos (location_t loc,
7214 tree arg0, tree arg1, tree arg2)
7219 if (!validate_arg (arg0, REAL_TYPE)
7220 || !validate_arg (arg1, POINTER_TYPE)
7221 || !validate_arg (arg2, POINTER_TYPE))
7224 type = TREE_TYPE (arg0);
7226 /* Calculate the result when the argument is a constant. */
7227 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7230 /* Canonicalize sincos to cexpi. */
7231 if (!TARGET_C99_FUNCTIONS)
7233 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7237 call = build_call_expr_loc (loc, fn, 1, arg0);
7238 call = builtin_save_expr (call);
7240 return build2 (COMPOUND_EXPR, void_type_node,
7241 build2 (MODIFY_EXPR, void_type_node,
7242 build_fold_indirect_ref_loc (loc, arg1),
7243 build1 (IMAGPART_EXPR, type, call)),
7244 build2 (MODIFY_EXPR, void_type_node,
7245 build_fold_indirect_ref_loc (loc, arg2),
7246 build1 (REALPART_EXPR, type, call)));
7249 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7250 NULL_TREE if no simplification can be made. */
7253 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7256 tree realp, imagp, ifn;
7259 if (!validate_arg (arg0, COMPLEX_TYPE)
7260 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7263 /* Calculate the result when the argument is a constant. */
7264 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7267 rtype = TREE_TYPE (TREE_TYPE (arg0));
7269 /* In case we can figure out the real part of arg0 and it is constant zero
7271 if (!TARGET_C99_FUNCTIONS)
7273 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7277 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7278 && real_zerop (realp))
7280 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7281 return build_call_expr_loc (loc, ifn, 1, narg);
7284 /* In case we can easily decompose real and imaginary parts split cexp
7285 to exp (r) * cexpi (i). */
7286 if (flag_unsafe_math_optimizations
7289 tree rfn, rcall, icall;
7291 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7295 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7299 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7300 icall = builtin_save_expr (icall);
7301 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7302 rcall = builtin_save_expr (rcall);
7303 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7304 fold_build2_loc (loc, MULT_EXPR, rtype,
7306 fold_build1_loc (loc, REALPART_EXPR,
7308 fold_build2_loc (loc, MULT_EXPR, rtype,
7310 fold_build1_loc (loc, IMAGPART_EXPR,
7317 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7318 Return NULL_TREE if no simplification can be made. */
7321 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7323 if (!validate_arg (arg, REAL_TYPE))
7326 /* Optimize trunc of constant value. */
7327 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7329 REAL_VALUE_TYPE r, x;
7330 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7332 x = TREE_REAL_CST (arg);
7333 real_trunc (&r, TYPE_MODE (type), &x);
7334 return build_real (type, r);
7337 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7340 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7341 Return NULL_TREE if no simplification can be made. */
7344 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7346 if (!validate_arg (arg, REAL_TYPE))
7349 /* Optimize floor of constant value. */
7350 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7354 x = TREE_REAL_CST (arg);
7355 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7357 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7360 real_floor (&r, TYPE_MODE (type), &x);
7361 return build_real (type, r);
7365 /* Fold floor (x) where x is nonnegative to trunc (x). */
7366 if (tree_expr_nonnegative_p (arg))
7368 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7370 return build_call_expr_loc (loc, truncfn, 1, arg);
7373 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7376 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7377 Return NULL_TREE if no simplification can be made. */
7380 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7382 if (!validate_arg (arg, REAL_TYPE))
7385 /* Optimize ceil of constant value. */
7386 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7390 x = TREE_REAL_CST (arg);
7391 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7393 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7396 real_ceil (&r, TYPE_MODE (type), &x);
7397 return build_real (type, r);
7401 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7404 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7405 Return NULL_TREE if no simplification can be made. */
7408 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7410 if (!validate_arg (arg, REAL_TYPE))
7413 /* Optimize round of constant value. */
7414 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7418 x = TREE_REAL_CST (arg);
7419 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7421 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7424 real_round (&r, TYPE_MODE (type), &x);
7425 return build_real (type, r);
7429 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7432 /* Fold function call to builtin lround, lroundf or lroundl (or the
7433 corresponding long long versions) and other rounding functions. ARG
7434 is the argument to the call. Return NULL_TREE if no simplification
7438 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7440 if (!validate_arg (arg, REAL_TYPE))
7443 /* Optimize lround of constant value. */
7444 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7446 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7448 if (real_isfinite (&x))
7450 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7451 tree ftype = TREE_TYPE (arg);
7452 unsigned HOST_WIDE_INT lo2;
7453 HOST_WIDE_INT hi, lo;
7456 switch (DECL_FUNCTION_CODE (fndecl))
7458 CASE_FLT_FN (BUILT_IN_LFLOOR):
7459 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7460 real_floor (&r, TYPE_MODE (ftype), &x);
7463 CASE_FLT_FN (BUILT_IN_LCEIL):
7464 CASE_FLT_FN (BUILT_IN_LLCEIL):
7465 real_ceil (&r, TYPE_MODE (ftype), &x);
7468 CASE_FLT_FN (BUILT_IN_LROUND):
7469 CASE_FLT_FN (BUILT_IN_LLROUND):
7470 real_round (&r, TYPE_MODE (ftype), &x);
7477 REAL_VALUE_TO_INT (&lo, &hi, r);
7478 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7479 return build_int_cst_wide (itype, lo2, hi);
7483 switch (DECL_FUNCTION_CODE (fndecl))
7485 CASE_FLT_FN (BUILT_IN_LFLOOR):
7486 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7487 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7488 if (tree_expr_nonnegative_p (arg))
7489 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7490 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7495 return fold_fixed_mathfn (loc, fndecl, arg);
7498 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7499 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7500 the argument to the call. Return NULL_TREE if no simplification can
7504 fold_builtin_bitop (tree fndecl, tree arg)
7506 if (!validate_arg (arg, INTEGER_TYPE))
7509 /* Optimize for constant argument. */
7510 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7512 HOST_WIDE_INT hi, width, result;
7513 unsigned HOST_WIDE_INT lo;
7516 type = TREE_TYPE (arg);
7517 width = TYPE_PRECISION (type);
7518 lo = TREE_INT_CST_LOW (arg);
7520 /* Clear all the bits that are beyond the type's precision. */
7521 if (width > HOST_BITS_PER_WIDE_INT)
7523 hi = TREE_INT_CST_HIGH (arg);
7524 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7525 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7530 if (width < HOST_BITS_PER_WIDE_INT)
7531 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7534 switch (DECL_FUNCTION_CODE (fndecl))
7536 CASE_INT_FN (BUILT_IN_FFS):
7538 result = exact_log2 (lo & -lo) + 1;
7540 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7545 CASE_INT_FN (BUILT_IN_CLZ):
7547 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7549 result = width - floor_log2 (lo) - 1;
7550 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7554 CASE_INT_FN (BUILT_IN_CTZ):
7556 result = exact_log2 (lo & -lo);
7558 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7559 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7563 CASE_INT_FN (BUILT_IN_POPCOUNT):
7566 result++, lo &= lo - 1;
7568 result++, hi &= hi - 1;
7571 CASE_INT_FN (BUILT_IN_PARITY):
7574 result++, lo &= lo - 1;
7576 result++, hi &= hi - 1;
7584 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7590 /* Fold function call to builtin_bswap and the long and long long
7591 variants. Return NULL_TREE if no simplification can be made. */
7593 fold_builtin_bswap (tree fndecl, tree arg)
7595 if (! validate_arg (arg, INTEGER_TYPE))
7598 /* Optimize constant value. */
7599 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7601 HOST_WIDE_INT hi, width, r_hi = 0;
7602 unsigned HOST_WIDE_INT lo, r_lo = 0;
7605 type = TREE_TYPE (arg);
7606 width = TYPE_PRECISION (type);
7607 lo = TREE_INT_CST_LOW (arg);
7608 hi = TREE_INT_CST_HIGH (arg);
7610 switch (DECL_FUNCTION_CODE (fndecl))
7612 case BUILT_IN_BSWAP32:
7613 case BUILT_IN_BSWAP64:
7617 for (s = 0; s < width; s += 8)
7619 int d = width - s - 8;
7620 unsigned HOST_WIDE_INT byte;
7622 if (s < HOST_BITS_PER_WIDE_INT)
7623 byte = (lo >> s) & 0xff;
7625 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7627 if (d < HOST_BITS_PER_WIDE_INT)
7630 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7640 if (width < HOST_BITS_PER_WIDE_INT)
7641 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7643 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7649 /* A subroutine of fold_builtin to fold the various logarithmic
7650 functions. Return NULL_TREE if no simplification can me made.
7651 FUNC is the corresponding MPFR logarithm function. */
7654 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7655 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7657 if (validate_arg (arg, REAL_TYPE))
7659 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7661 const enum built_in_function fcode = builtin_mathfn_code (arg);
7663 /* Calculate the result when the argument is a constant. */
7664 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7667 /* Special case, optimize logN(expN(x)) = x. */
7668 if (flag_unsafe_math_optimizations
7669 && ((func == mpfr_log
7670 && (fcode == BUILT_IN_EXP
7671 || fcode == BUILT_IN_EXPF
7672 || fcode == BUILT_IN_EXPL))
7673 || (func == mpfr_log2
7674 && (fcode == BUILT_IN_EXP2
7675 || fcode == BUILT_IN_EXP2F
7676 || fcode == BUILT_IN_EXP2L))
7677 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7678 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7680 /* Optimize logN(func()) for various exponential functions. We
7681 want to determine the value "x" and the power "exponent" in
7682 order to transform logN(x**exponent) into exponent*logN(x). */
7683 if (flag_unsafe_math_optimizations)
7685 tree exponent = 0, x = 0;
7689 CASE_FLT_FN (BUILT_IN_EXP):
7690 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7691 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7693 exponent = CALL_EXPR_ARG (arg, 0);
7695 CASE_FLT_FN (BUILT_IN_EXP2):
7696 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7697 x = build_real (type, dconst2);
7698 exponent = CALL_EXPR_ARG (arg, 0);
7700 CASE_FLT_FN (BUILT_IN_EXP10):
7701 CASE_FLT_FN (BUILT_IN_POW10):
7702 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7704 REAL_VALUE_TYPE dconst10;
7705 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7706 x = build_real (type, dconst10);
7708 exponent = CALL_EXPR_ARG (arg, 0);
7710 CASE_FLT_FN (BUILT_IN_SQRT):
7711 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7712 x = CALL_EXPR_ARG (arg, 0);
7713 exponent = build_real (type, dconsthalf);
7715 CASE_FLT_FN (BUILT_IN_CBRT):
7716 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7717 x = CALL_EXPR_ARG (arg, 0);
7718 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7721 CASE_FLT_FN (BUILT_IN_POW):
7722 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7723 x = CALL_EXPR_ARG (arg, 0);
7724 exponent = CALL_EXPR_ARG (arg, 1);
7730 /* Now perform the optimization. */
7733 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7734 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7742 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7743 NULL_TREE if no simplification can be made. */
7746 fold_builtin_hypot (location_t loc, tree fndecl,
7747 tree arg0, tree arg1, tree type)
7749 tree res, narg0, narg1;
7751 if (!validate_arg (arg0, REAL_TYPE)
7752 || !validate_arg (arg1, REAL_TYPE))
7755 /* Calculate the result when the argument is a constant. */
7756 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7759 /* If either argument to hypot has a negate or abs, strip that off.
7760 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7761 narg0 = fold_strip_sign_ops (arg0);
7762 narg1 = fold_strip_sign_ops (arg1);
7765 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7766 narg1 ? narg1 : arg1);
7769 /* If either argument is zero, hypot is fabs of the other. */
7770 if (real_zerop (arg0))
7771 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7772 else if (real_zerop (arg1))
7773 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7775 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7776 if (flag_unsafe_math_optimizations
7777 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7779 const REAL_VALUE_TYPE sqrt2_trunc
7780 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7781 return fold_build2_loc (loc, MULT_EXPR, type,
7782 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7783 build_real (type, sqrt2_trunc));
7790 /* Fold a builtin function call to pow, powf, or powl. Return
7791 NULL_TREE if no simplification can be made. */
7793 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7797 if (!validate_arg (arg0, REAL_TYPE)
7798 || !validate_arg (arg1, REAL_TYPE))
7801 /* Calculate the result when the argument is a constant. */
7802 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7805 /* Optimize pow(1.0,y) = 1.0. */
7806 if (real_onep (arg0))
7807 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7809 if (TREE_CODE (arg1) == REAL_CST
7810 && !TREE_OVERFLOW (arg1))
7812 REAL_VALUE_TYPE cint;
7816 c = TREE_REAL_CST (arg1);
7818 /* Optimize pow(x,0.0) = 1.0. */
7819 if (REAL_VALUES_EQUAL (c, dconst0))
7820 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7823 /* Optimize pow(x,1.0) = x. */
7824 if (REAL_VALUES_EQUAL (c, dconst1))
7827 /* Optimize pow(x,-1.0) = 1.0/x. */
7828 if (REAL_VALUES_EQUAL (c, dconstm1))
7829 return fold_build2_loc (loc, RDIV_EXPR, type,
7830 build_real (type, dconst1), arg0);
7832 /* Optimize pow(x,0.5) = sqrt(x). */
7833 if (flag_unsafe_math_optimizations
7834 && REAL_VALUES_EQUAL (c, dconsthalf))
7836 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7838 if (sqrtfn != NULL_TREE)
7839 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7842 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7843 if (flag_unsafe_math_optimizations)
7845 const REAL_VALUE_TYPE dconstroot
7846 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7848 if (REAL_VALUES_EQUAL (c, dconstroot))
7850 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7851 if (cbrtfn != NULL_TREE)
7852 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7856 /* Check for an integer exponent. */
7857 n = real_to_integer (&c);
7858 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7859 if (real_identical (&c, &cint))
7861 /* Attempt to evaluate pow at compile-time, unless this should
7862 raise an exception. */
7863 if (TREE_CODE (arg0) == REAL_CST
7864 && !TREE_OVERFLOW (arg0)
7866 || (!flag_trapping_math && !flag_errno_math)
7867 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7872 x = TREE_REAL_CST (arg0);
7873 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7874 if (flag_unsafe_math_optimizations || !inexact)
7875 return build_real (type, x);
7878 /* Strip sign ops from even integer powers. */
7879 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7881 tree narg0 = fold_strip_sign_ops (arg0);
7883 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
7888 if (flag_unsafe_math_optimizations)
7890 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7892 /* Optimize pow(expN(x),y) = expN(x*y). */
7893 if (BUILTIN_EXPONENT_P (fcode))
7895 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7896 tree arg = CALL_EXPR_ARG (arg0, 0);
7897 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7898 return build_call_expr_loc (loc, expfn, 1, arg);
7901 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7902 if (BUILTIN_SQRT_P (fcode))
7904 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7905 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7906 build_real (type, dconsthalf));
7907 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7910 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7911 if (BUILTIN_CBRT_P (fcode))
7913 tree arg = CALL_EXPR_ARG (arg0, 0);
7914 if (tree_expr_nonnegative_p (arg))
7916 const REAL_VALUE_TYPE dconstroot
7917 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7918 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7919 build_real (type, dconstroot));
7920 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7924 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7925 if (fcode == BUILT_IN_POW
7926 || fcode == BUILT_IN_POWF
7927 || fcode == BUILT_IN_POWL)
7929 tree arg00 = CALL_EXPR_ARG (arg0, 0);
7930 if (tree_expr_nonnegative_p (arg00))
7932 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7933 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7934 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7942 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
7943 Return NULL_TREE if no simplification can be made. */
7945 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7946 tree arg0, tree arg1, tree type)
7948 if (!validate_arg (arg0, REAL_TYPE)
7949 || !validate_arg (arg1, INTEGER_TYPE))
7952 /* Optimize pow(1.0,y) = 1.0. */
7953 if (real_onep (arg0))
7954 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7956 if (host_integerp (arg1, 0))
7958 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7960 /* Evaluate powi at compile-time. */
7961 if (TREE_CODE (arg0) == REAL_CST
7962 && !TREE_OVERFLOW (arg0))
7965 x = TREE_REAL_CST (arg0);
7966 real_powi (&x, TYPE_MODE (type), &x, c);
7967 return build_real (type, x);
7970 /* Optimize pow(x,0) = 1.0. */
7972 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7975 /* Optimize pow(x,1) = x. */
7979 /* Optimize pow(x,-1) = 1.0/x. */
7981 return fold_build2_loc (loc, RDIV_EXPR, type,
7982 build_real (type, dconst1), arg0);
7988 /* A subroutine of fold_builtin to fold the various exponent
7989 functions. Return NULL_TREE if no simplification can be made.
7990 FUNC is the corresponding MPFR exponent function. */
7993 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
7994 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7996 if (validate_arg (arg, REAL_TYPE))
7998 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8001 /* Calculate the result when the argument is a constant. */
8002 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8005 /* Optimize expN(logN(x)) = x. */
8006 if (flag_unsafe_math_optimizations)
8008 const enum built_in_function fcode = builtin_mathfn_code (arg);
8010 if ((func == mpfr_exp
8011 && (fcode == BUILT_IN_LOG
8012 || fcode == BUILT_IN_LOGF
8013 || fcode == BUILT_IN_LOGL))
8014 || (func == mpfr_exp2
8015 && (fcode == BUILT_IN_LOG2
8016 || fcode == BUILT_IN_LOG2F
8017 || fcode == BUILT_IN_LOG2L))
8018 || (func == mpfr_exp10
8019 && (fcode == BUILT_IN_LOG10
8020 || fcode == BUILT_IN_LOG10F
8021 || fcode == BUILT_IN_LOG10L)))
8022 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8029 /* Return true if VAR is a VAR_DECL or a component thereof. */
8032 var_decl_component_p (tree var)
8035 while (handled_component_p (inner))
8036 inner = TREE_OPERAND (inner, 0);
8037 return SSA_VAR_P (inner);
8040 /* Fold function call to builtin memset. Return
8041 NULL_TREE if no simplification can be made. */
8044 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8045 tree type, bool ignore)
8047 tree var, ret, etype;
8048 unsigned HOST_WIDE_INT length, cval;
8050 if (! validate_arg (dest, POINTER_TYPE)
8051 || ! validate_arg (c, INTEGER_TYPE)
8052 || ! validate_arg (len, INTEGER_TYPE))
8055 if (! host_integerp (len, 1))
8058 /* If the LEN parameter is zero, return DEST. */
8059 if (integer_zerop (len))
8060 return omit_one_operand_loc (loc, type, dest, c);
8062 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8067 if (TREE_CODE (var) != ADDR_EXPR)
8070 var = TREE_OPERAND (var, 0);
8071 if (TREE_THIS_VOLATILE (var))
8074 etype = TREE_TYPE (var);
8075 if (TREE_CODE (etype) == ARRAY_TYPE)
8076 etype = TREE_TYPE (etype);
8078 if (!INTEGRAL_TYPE_P (etype)
8079 && !POINTER_TYPE_P (etype))
8082 if (! var_decl_component_p (var))
8085 length = tree_low_cst (len, 1);
8086 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8087 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8091 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8094 if (integer_zerop (c))
8098 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8101 cval = tree_low_cst (c, 1);
8105 cval |= (cval << 31) << 1;
8108 ret = build_int_cst_type (etype, cval);
8109 var = build_fold_indirect_ref_loc (loc,
8110 fold_convert_loc (loc,
8111 build_pointer_type (etype),
8113 ret = build2 (MODIFY_EXPR, etype, var, ret);
8117 return omit_one_operand_loc (loc, type, dest, ret);
8120 /* Fold function call to builtin memset. Return
8121 NULL_TREE if no simplification can be made. */
8124 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8126 if (! validate_arg (dest, POINTER_TYPE)
8127 || ! validate_arg (size, INTEGER_TYPE))
8133 /* New argument list transforming bzero(ptr x, int y) to
8134 memset(ptr x, int 0, size_t y). This is done this way
8135 so that if it isn't expanded inline, we fallback to
8136 calling bzero instead of memset. */
8138 return fold_builtin_memset (loc, dest, integer_zero_node,
8139 fold_convert_loc (loc, sizetype, size),
8140 void_type_node, ignore);
8143 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8144 NULL_TREE if no simplification can be made.
8145 If ENDP is 0, return DEST (like memcpy).
8146 If ENDP is 1, return DEST+LEN (like mempcpy).
8147 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8148 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8152 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8153 tree len, tree type, bool ignore, int endp)
8155 tree destvar, srcvar, expr;
8157 if (! validate_arg (dest, POINTER_TYPE)
8158 || ! validate_arg (src, POINTER_TYPE)
8159 || ! validate_arg (len, INTEGER_TYPE))
8162 /* If the LEN parameter is zero, return DEST. */
8163 if (integer_zerop (len))
8164 return omit_one_operand_loc (loc, type, dest, src);
8166 /* If SRC and DEST are the same (and not volatile), return
8167 DEST{,+LEN,+LEN-1}. */
8168 if (operand_equal_p (src, dest, 0))
8172 tree srctype, desttype;
8173 int src_align, dest_align;
8177 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8178 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8180 /* Both DEST and SRC must be pointer types.
8181 ??? This is what old code did. Is the testing for pointer types
8184 If either SRC is readonly or length is 1, we can use memcpy. */
8185 if (!dest_align || !src_align)
8187 if (readonly_data_expr (src)
8188 || (host_integerp (len, 1)
8189 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8190 >= tree_low_cst (len, 1))))
8192 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8195 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8198 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8199 srcvar = build_fold_indirect_ref_loc (loc, src);
8200 destvar = build_fold_indirect_ref_loc (loc, dest);
8202 && !TREE_THIS_VOLATILE (srcvar)
8204 && !TREE_THIS_VOLATILE (destvar))
8206 tree src_base, dest_base, fn;
8207 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8208 HOST_WIDE_INT size = -1;
8209 HOST_WIDE_INT maxsize = -1;
8212 if (handled_component_p (src_base))
8213 src_base = get_ref_base_and_extent (src_base, &src_offset,
8215 dest_base = destvar;
8216 if (handled_component_p (dest_base))
8217 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8219 if (host_integerp (len, 1))
8221 maxsize = tree_low_cst (len, 1);
8223 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8226 maxsize *= BITS_PER_UNIT;
8230 if (SSA_VAR_P (src_base)
8231 && SSA_VAR_P (dest_base))
8233 if (operand_equal_p (src_base, dest_base, 0)
8234 && ranges_overlap_p (src_offset, maxsize,
8235 dest_offset, maxsize))
8238 else if (TREE_CODE (src_base) == INDIRECT_REF
8239 && TREE_CODE (dest_base) == INDIRECT_REF)
8241 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8242 TREE_OPERAND (dest_base, 0), 0)
8243 || ranges_overlap_p (src_offset, maxsize,
8244 dest_offset, maxsize))
8250 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8253 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8258 if (!host_integerp (len, 0))
8261 This logic lose for arguments like (type *)malloc (sizeof (type)),
8262 since we strip the casts of up to VOID return value from malloc.
8263 Perhaps we ought to inherit type from non-VOID argument here? */
8266 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8267 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8269 tree tem = TREE_OPERAND (src, 0);
8271 if (tem != TREE_OPERAND (src, 0))
8272 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8274 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8276 tree tem = TREE_OPERAND (dest, 0);
8278 if (tem != TREE_OPERAND (dest, 0))
8279 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8281 srctype = TREE_TYPE (TREE_TYPE (src));
8283 && TREE_CODE (srctype) == ARRAY_TYPE
8284 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8286 srctype = TREE_TYPE (srctype);
8288 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8290 desttype = TREE_TYPE (TREE_TYPE (dest));
8292 && TREE_CODE (desttype) == ARRAY_TYPE
8293 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8295 desttype = TREE_TYPE (desttype);
8297 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8299 if (!srctype || !desttype
8300 || !TYPE_SIZE_UNIT (srctype)
8301 || !TYPE_SIZE_UNIT (desttype)
8302 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8303 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8304 || TYPE_VOLATILE (srctype)
8305 || TYPE_VOLATILE (desttype))
8308 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8309 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8310 if (dest_align < (int) TYPE_ALIGN (desttype)
8311 || src_align < (int) TYPE_ALIGN (srctype))
8315 dest = builtin_save_expr (dest);
8318 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8320 srcvar = build_fold_indirect_ref_loc (loc, src);
8321 if (TREE_THIS_VOLATILE (srcvar))
8323 else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
8325 /* With memcpy, it is possible to bypass aliasing rules, so without
8326 this check i.e. execute/20060930-2.c would be misoptimized,
8327 because it use conflicting alias set to hold argument for the
8328 memcpy call. This check is probably unnecessary with
8329 -fno-strict-aliasing. Similarly for destvar. See also
8331 else if (!var_decl_component_p (srcvar))
8335 destvar = NULL_TREE;
8336 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8338 destvar = build_fold_indirect_ref_loc (loc, dest);
8339 if (TREE_THIS_VOLATILE (destvar))
8341 else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
8342 destvar = NULL_TREE;
8343 else if (!var_decl_component_p (destvar))
8344 destvar = NULL_TREE;
8347 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8350 if (srcvar == NULL_TREE)
8353 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8356 srctype = build_qualified_type (desttype, 0);
8357 if (src_align < (int) TYPE_ALIGN (srctype))
8359 if (AGGREGATE_TYPE_P (srctype)
8360 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8363 srctype = build_variant_type_copy (srctype);
8364 TYPE_ALIGN (srctype) = src_align;
8365 TYPE_USER_ALIGN (srctype) = 1;
8366 TYPE_PACKED (srctype) = 1;
8368 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8369 src = fold_convert_loc (loc, srcptype, src);
8370 srcvar = build_fold_indirect_ref_loc (loc, src);
8372 else if (destvar == NULL_TREE)
8375 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8378 desttype = build_qualified_type (srctype, 0);
8379 if (dest_align < (int) TYPE_ALIGN (desttype))
8381 if (AGGREGATE_TYPE_P (desttype)
8382 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8385 desttype = build_variant_type_copy (desttype);
8386 TYPE_ALIGN (desttype) = dest_align;
8387 TYPE_USER_ALIGN (desttype) = 1;
8388 TYPE_PACKED (desttype) = 1;
8390 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8391 dest = fold_convert_loc (loc, destptype, dest);
8392 destvar = build_fold_indirect_ref_loc (loc, dest);
8395 if (srctype == desttype
8396 || (gimple_in_ssa_p (cfun)
8397 && useless_type_conversion_p (desttype, srctype)))
8399 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8400 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8401 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8402 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8403 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
8405 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8406 TREE_TYPE (destvar), srcvar);
8407 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8413 if (endp == 0 || endp == 3)
8414 return omit_one_operand_loc (loc, type, dest, expr);
8420 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8423 len = fold_convert_loc (loc, sizetype, len);
8424 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8425 dest = fold_convert_loc (loc, type, dest);
8427 dest = omit_one_operand_loc (loc, type, dest, expr);
8431 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8432 If LEN is not NULL, it represents the length of the string to be
8433 copied. Return NULL_TREE if no simplification can be made. */
8436 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8440 if (!validate_arg (dest, POINTER_TYPE)
8441 || !validate_arg (src, POINTER_TYPE))
8444 /* If SRC and DEST are the same (and not volatile), return DEST. */
8445 if (operand_equal_p (src, dest, 0))
8446 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8448 if (optimize_function_for_size_p (cfun))
8451 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8457 len = c_strlen (src, 1);
8458 if (! len || TREE_SIDE_EFFECTS (len))
8462 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8463 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8464 build_call_expr_loc (loc, fn, 3, dest, src, len));
8467 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8468 Return NULL_TREE if no simplification can be made. */
8471 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8473 tree fn, len, lenp1, call, type;
8475 if (!validate_arg (dest, POINTER_TYPE)
8476 || !validate_arg (src, POINTER_TYPE))
8479 len = c_strlen (src, 1);
8481 || TREE_CODE (len) != INTEGER_CST)
8484 if (optimize_function_for_size_p (cfun)
8485 /* If length is zero it's small enough. */
8486 && !integer_zerop (len))
8489 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8493 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8494 /* We use dest twice in building our expression. Save it from
8495 multiple expansions. */
8496 dest = builtin_save_expr (dest);
8497 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8499 type = TREE_TYPE (TREE_TYPE (fndecl));
8500 len = fold_convert_loc (loc, sizetype, len);
8501 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8502 dest = fold_convert_loc (loc, type, dest);
8503 dest = omit_one_operand_loc (loc, type, dest, call);
8507 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8508 If SLEN is not NULL, it represents the length of the source string.
8509 Return NULL_TREE if no simplification can be made. */
8512 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8513 tree src, tree len, tree slen)
8517 if (!validate_arg (dest, POINTER_TYPE)
8518 || !validate_arg (src, POINTER_TYPE)
8519 || !validate_arg (len, INTEGER_TYPE))
8522 /* If the LEN parameter is zero, return DEST. */
8523 if (integer_zerop (len))
8524 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8526 /* We can't compare slen with len as constants below if len is not a
8528 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8532 slen = c_strlen (src, 1);
8534 /* Now, we must be passed a constant src ptr parameter. */
8535 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8538 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8540 /* We do not support simplification of this case, though we do
8541 support it when expanding trees into RTL. */
8542 /* FIXME: generate a call to __builtin_memset. */
8543 if (tree_int_cst_lt (slen, len))
8546 /* OK transform into builtin memcpy. */
8547 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8550 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8551 build_call_expr_loc (loc, fn, 3, dest, src, len));
8554 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8555 arguments to the call, and TYPE is its return type.
8556 Return NULL_TREE if no simplification can be made. */
8559 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8561 if (!validate_arg (arg1, POINTER_TYPE)
8562 || !validate_arg (arg2, INTEGER_TYPE)
8563 || !validate_arg (len, INTEGER_TYPE))
8569 if (TREE_CODE (arg2) != INTEGER_CST
8570 || !host_integerp (len, 1))
8573 p1 = c_getstr (arg1);
8574 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8580 if (target_char_cast (arg2, &c))
8583 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8586 return build_int_cst (TREE_TYPE (arg1), 0);
8588 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8590 return fold_convert_loc (loc, type, tem);
8596 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8597 Return NULL_TREE if no simplification can be made. */
8600 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8602 const char *p1, *p2;
8604 if (!validate_arg (arg1, POINTER_TYPE)
8605 || !validate_arg (arg2, POINTER_TYPE)
8606 || !validate_arg (len, INTEGER_TYPE))
8609 /* If the LEN parameter is zero, return zero. */
8610 if (integer_zerop (len))
8611 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8614 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8615 if (operand_equal_p (arg1, arg2, 0))
8616 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8618 p1 = c_getstr (arg1);
8619 p2 = c_getstr (arg2);
8621 /* If all arguments are constant, and the value of len is not greater
8622 than the lengths of arg1 and arg2, evaluate at compile-time. */
8623 if (host_integerp (len, 1) && p1 && p2
8624 && compare_tree_int (len, strlen (p1) + 1) <= 0
8625 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8627 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8630 return integer_one_node;
8632 return integer_minus_one_node;
8634 return integer_zero_node;
8637 /* If len parameter is one, return an expression corresponding to
8638 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8639 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8641 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8642 tree cst_uchar_ptr_node
8643 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8646 = fold_convert_loc (loc, integer_type_node,
8647 build1 (INDIRECT_REF, cst_uchar_node,
8648 fold_convert_loc (loc,
8652 = fold_convert_loc (loc, integer_type_node,
8653 build1 (INDIRECT_REF, cst_uchar_node,
8654 fold_convert_loc (loc,
8657 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8663 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8664 Return NULL_TREE if no simplification can be made. */
8667 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8669 const char *p1, *p2;
8671 if (!validate_arg (arg1, POINTER_TYPE)
8672 || !validate_arg (arg2, POINTER_TYPE))
8675 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8676 if (operand_equal_p (arg1, arg2, 0))
8677 return integer_zero_node;
8679 p1 = c_getstr (arg1);
8680 p2 = c_getstr (arg2);
8684 const int i = strcmp (p1, p2);
8686 return integer_minus_one_node;
8688 return integer_one_node;
8690 return integer_zero_node;
8693 /* If the second arg is "", return *(const unsigned char*)arg1. */
8694 if (p2 && *p2 == '\0')
8696 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8697 tree cst_uchar_ptr_node
8698 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8700 return fold_convert_loc (loc, integer_type_node,
8701 build1 (INDIRECT_REF, cst_uchar_node,
8702 fold_convert_loc (loc,
8707 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8708 if (p1 && *p1 == '\0')
8710 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8711 tree cst_uchar_ptr_node
8712 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8715 = fold_convert_loc (loc, integer_type_node,
8716 build1 (INDIRECT_REF, cst_uchar_node,
8717 fold_convert_loc (loc,
8720 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8726 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8727 Return NULL_TREE if no simplification can be made. */
8730 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8732 const char *p1, *p2;
8734 if (!validate_arg (arg1, POINTER_TYPE)
8735 || !validate_arg (arg2, POINTER_TYPE)
8736 || !validate_arg (len, INTEGER_TYPE))
8739 /* If the LEN parameter is zero, return zero. */
8740 if (integer_zerop (len))
8741 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8744 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8745 if (operand_equal_p (arg1, arg2, 0))
8746 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8748 p1 = c_getstr (arg1);
8749 p2 = c_getstr (arg2);
8751 if (host_integerp (len, 1) && p1 && p2)
8753 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8755 return integer_one_node;
8757 return integer_minus_one_node;
8759 return integer_zero_node;
8762 /* If the second arg is "", and the length is greater than zero,
8763 return *(const unsigned char*)arg1. */
8764 if (p2 && *p2 == '\0'
8765 && TREE_CODE (len) == INTEGER_CST
8766 && tree_int_cst_sgn (len) == 1)
8768 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8769 tree cst_uchar_ptr_node
8770 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8772 return fold_convert_loc (loc, integer_type_node,
8773 build1 (INDIRECT_REF, cst_uchar_node,
8774 fold_convert_loc (loc,
8779 /* If the first arg is "", and the length is greater than zero,
8780 return -*(const unsigned char*)arg2. */
8781 if (p1 && *p1 == '\0'
8782 && TREE_CODE (len) == INTEGER_CST
8783 && tree_int_cst_sgn (len) == 1)
8785 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8786 tree cst_uchar_ptr_node
8787 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8789 tree temp = fold_convert_loc (loc, integer_type_node,
8790 build1 (INDIRECT_REF, cst_uchar_node,
8791 fold_convert_loc (loc,
8794 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8797 /* If len parameter is one, return an expression corresponding to
8798 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8799 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8801 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8802 tree cst_uchar_ptr_node
8803 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8805 tree ind1 = fold_convert_loc (loc, integer_type_node,
8806 build1 (INDIRECT_REF, cst_uchar_node,
8807 fold_convert_loc (loc,
8810 tree ind2 = fold_convert_loc (loc, integer_type_node,
8811 build1 (INDIRECT_REF, cst_uchar_node,
8812 fold_convert_loc (loc,
8815 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8821 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8822 ARG. Return NULL_TREE if no simplification can be made. */
8825 fold_builtin_signbit (location_t loc, tree arg, tree type)
8829 if (!validate_arg (arg, REAL_TYPE))
8832 /* If ARG is a compile-time constant, determine the result. */
8833 if (TREE_CODE (arg) == REAL_CST
8834 && !TREE_OVERFLOW (arg))
8838 c = TREE_REAL_CST (arg);
8839 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8840 return fold_convert_loc (loc, type, temp);
8843 /* If ARG is non-negative, the result is always zero. */
8844 if (tree_expr_nonnegative_p (arg))
8845 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8847 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8848 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8849 return fold_build2_loc (loc, LT_EXPR, type, arg,
8850 build_real (TREE_TYPE (arg), dconst0));
8855 /* Fold function call to builtin copysign, copysignf or copysignl with
8856 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8860 fold_builtin_copysign (location_t loc, tree fndecl,
8861 tree arg1, tree arg2, tree type)
8865 if (!validate_arg (arg1, REAL_TYPE)
8866 || !validate_arg (arg2, REAL_TYPE))
8869 /* copysign(X,X) is X. */
8870 if (operand_equal_p (arg1, arg2, 0))
8871 return fold_convert_loc (loc, type, arg1);
8873 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8874 if (TREE_CODE (arg1) == REAL_CST
8875 && TREE_CODE (arg2) == REAL_CST
8876 && !TREE_OVERFLOW (arg1)
8877 && !TREE_OVERFLOW (arg2))
8879 REAL_VALUE_TYPE c1, c2;
8881 c1 = TREE_REAL_CST (arg1);
8882 c2 = TREE_REAL_CST (arg2);
8883 /* c1.sign := c2.sign. */
8884 real_copysign (&c1, &c2);
8885 return build_real (type, c1);
8888 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8889 Remember to evaluate Y for side-effects. */
8890 if (tree_expr_nonnegative_p (arg2))
8891 return omit_one_operand_loc (loc, type,
8892 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8895 /* Strip sign changing operations for the first argument. */
8896 tem = fold_strip_sign_ops (arg1);
8898 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8903 /* Fold a call to builtin isascii with argument ARG. */
8906 fold_builtin_isascii (location_t loc, tree arg)
8908 if (!validate_arg (arg, INTEGER_TYPE))
8912 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8913 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8914 build_int_cst (NULL_TREE,
8915 ~ (unsigned HOST_WIDE_INT) 0x7f));
8916 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8917 arg, integer_zero_node);
8921 /* Fold a call to builtin toascii with argument ARG. */
8924 fold_builtin_toascii (location_t loc, tree arg)
8926 if (!validate_arg (arg, INTEGER_TYPE))
8929 /* Transform toascii(c) -> (c & 0x7f). */
8930 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8931 build_int_cst (NULL_TREE, 0x7f));
8934 /* Fold a call to builtin isdigit with argument ARG. */
8937 fold_builtin_isdigit (location_t loc, tree arg)
8939 if (!validate_arg (arg, INTEGER_TYPE))
8943 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8944 /* According to the C standard, isdigit is unaffected by locale.
8945 However, it definitely is affected by the target character set. */
8946 unsigned HOST_WIDE_INT target_digit0
8947 = lang_hooks.to_target_charset ('0');
8949 if (target_digit0 == 0)
8952 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8953 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8954 build_int_cst (unsigned_type_node, target_digit0));
8955 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8956 build_int_cst (unsigned_type_node, 9));
8960 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8963 fold_builtin_fabs (location_t loc, tree arg, tree type)
8965 if (!validate_arg (arg, REAL_TYPE))
8968 arg = fold_convert_loc (loc, type, arg);
8969 if (TREE_CODE (arg) == REAL_CST)
8970 return fold_abs_const (arg, type);
8971 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8974 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8977 fold_builtin_abs (location_t loc, tree arg, tree type)
8979 if (!validate_arg (arg, INTEGER_TYPE))
8982 arg = fold_convert_loc (loc, type, arg);
8983 if (TREE_CODE (arg) == INTEGER_CST)
8984 return fold_abs_const (arg, type);
8985 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8988 /* Fold a call to builtin fmin or fmax. */
8991 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8992 tree type, bool max)
8994 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8996 /* Calculate the result when the argument is a constant. */
8997 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9002 /* If either argument is NaN, return the other one. Avoid the
9003 transformation if we get (and honor) a signalling NaN. Using
9004 omit_one_operand() ensures we create a non-lvalue. */
9005 if (TREE_CODE (arg0) == REAL_CST
9006 && real_isnan (&TREE_REAL_CST (arg0))
9007 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9008 || ! TREE_REAL_CST (arg0).signalling))
9009 return omit_one_operand_loc (loc, type, arg1, arg0);
9010 if (TREE_CODE (arg1) == REAL_CST
9011 && real_isnan (&TREE_REAL_CST (arg1))
9012 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9013 || ! TREE_REAL_CST (arg1).signalling))
9014 return omit_one_operand_loc (loc, type, arg0, arg1);
9016 /* Transform fmin/fmax(x,x) -> x. */
9017 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9018 return omit_one_operand_loc (loc, type, arg0, arg1);
9020 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9021 functions to return the numeric arg if the other one is NaN.
9022 These tree codes don't honor that, so only transform if
9023 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9024 handled, so we don't have to worry about it either. */
9025 if (flag_finite_math_only)
9026 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9027 fold_convert_loc (loc, type, arg0),
9028 fold_convert_loc (loc, type, arg1));
9033 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9036 fold_builtin_carg (location_t loc, tree arg, tree type)
9038 if (validate_arg (arg, COMPLEX_TYPE)
9039 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9041 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9045 tree new_arg = builtin_save_expr (arg);
9046 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9047 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9048 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9055 /* Fold a call to builtin logb/ilogb. */
9058 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9060 if (! validate_arg (arg, REAL_TYPE))
9065 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9067 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9073 /* If arg is Inf or NaN and we're logb, return it. */
9074 if (TREE_CODE (rettype) == REAL_TYPE)
9075 return fold_convert_loc (loc, rettype, arg);
9076 /* Fall through... */
9078 /* Zero may set errno and/or raise an exception for logb, also
9079 for ilogb we don't know FP_ILOGB0. */
9082 /* For normal numbers, proceed iff radix == 2. In GCC,
9083 normalized significands are in the range [0.5, 1.0). We
9084 want the exponent as if they were [1.0, 2.0) so get the
9085 exponent and subtract 1. */
9086 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9087 return fold_convert_loc (loc, rettype,
9088 build_int_cst (NULL_TREE,
9089 REAL_EXP (value)-1));
9097 /* Fold a call to builtin significand, if radix == 2. */
9100 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9102 if (! validate_arg (arg, REAL_TYPE))
9107 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9109 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9116 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9117 return fold_convert_loc (loc, rettype, arg);
9119 /* For normal numbers, proceed iff radix == 2. */
9120 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9122 REAL_VALUE_TYPE result = *value;
9123 /* In GCC, normalized significands are in the range [0.5,
9124 1.0). We want them to be [1.0, 2.0) so set the
9126 SET_REAL_EXP (&result, 1);
9127 return build_real (rettype, result);
9136 /* Fold a call to builtin frexp, we can assume the base is 2. */
9139 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9141 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9146 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9149 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9151 /* Proceed if a valid pointer type was passed in. */
9152 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9154 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9160 /* For +-0, return (*exp = 0, +-0). */
9161 exp = integer_zero_node;
9166 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9167 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9170 /* Since the frexp function always expects base 2, and in
9171 GCC normalized significands are already in the range
9172 [0.5, 1.0), we have exactly what frexp wants. */
9173 REAL_VALUE_TYPE frac_rvt = *value;
9174 SET_REAL_EXP (&frac_rvt, 0);
9175 frac = build_real (rettype, frac_rvt);
9176 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9183 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9184 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9185 TREE_SIDE_EFFECTS (arg1) = 1;
9186 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9192 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9193 then we can assume the base is two. If it's false, then we have to
9194 check the mode of the TYPE parameter in certain cases. */
9197 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9198 tree type, bool ldexp)
9200 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9205 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9206 if (real_zerop (arg0) || integer_zerop (arg1)
9207 || (TREE_CODE (arg0) == REAL_CST
9208 && !real_isfinite (&TREE_REAL_CST (arg0))))
9209 return omit_one_operand_loc (loc, type, arg0, arg1);
9211 /* If both arguments are constant, then try to evaluate it. */
9212 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9213 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9214 && host_integerp (arg1, 0))
9216 /* Bound the maximum adjustment to twice the range of the
9217 mode's valid exponents. Use abs to ensure the range is
9218 positive as a sanity check. */
9219 const long max_exp_adj = 2 *
9220 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9221 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9223 /* Get the user-requested adjustment. */
9224 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9226 /* The requested adjustment must be inside this range. This
9227 is a preliminary cap to avoid things like overflow, we
9228 may still fail to compute the result for other reasons. */
9229 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9231 REAL_VALUE_TYPE initial_result;
9233 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9235 /* Ensure we didn't overflow. */
9236 if (! real_isinf (&initial_result))
9238 const REAL_VALUE_TYPE trunc_result
9239 = real_value_truncate (TYPE_MODE (type), initial_result);
9241 /* Only proceed if the target mode can hold the
9243 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9244 return build_real (type, trunc_result);
9253 /* Fold a call to builtin modf. */
9256 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9258 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9263 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9266 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9268 /* Proceed if a valid pointer type was passed in. */
9269 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9271 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9272 REAL_VALUE_TYPE trunc, frac;
9278 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9279 trunc = frac = *value;
9282 /* For +-Inf, return (*arg1 = arg0, +-0). */
9284 frac.sign = value->sign;
9288 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9289 real_trunc (&trunc, VOIDmode, value);
9290 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9291 /* If the original number was negative and already
9292 integral, then the fractional part is -0.0. */
9293 if (value->sign && frac.cl == rvc_zero)
9294 frac.sign = value->sign;
9298 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9299 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9300 build_real (rettype, trunc));
9301 TREE_SIDE_EFFECTS (arg1) = 1;
9302 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9303 build_real (rettype, frac));
9309 /* Given a location LOC, an interclass builtin function decl FNDECL
9310 and its single argument ARG, return an folded expression computing
9311 the same, or NULL_TREE if we either couldn't or didn't want to fold
9312 (the latter happen if there's an RTL instruction available). */
9315 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9317 enum machine_mode mode;
9319 if (!validate_arg (arg, REAL_TYPE))
9322 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9325 mode = TYPE_MODE (TREE_TYPE (arg));
9327 /* If there is no optab, try generic code. */
9328 switch (DECL_FUNCTION_CODE (fndecl))
9332 CASE_FLT_FN (BUILT_IN_ISINF):
9334 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9335 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9336 tree const type = TREE_TYPE (arg);
9340 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9341 real_from_string (&r, buf);
9342 result = build_call_expr (isgr_fn, 2,
9343 fold_build1_loc (loc, ABS_EXPR, type, arg),
9344 build_real (type, r));
9347 CASE_FLT_FN (BUILT_IN_FINITE):
9348 case BUILT_IN_ISFINITE:
9350 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9351 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9352 tree const type = TREE_TYPE (arg);
9356 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9357 real_from_string (&r, buf);
9358 result = build_call_expr (isle_fn, 2,
9359 fold_build1_loc (loc, ABS_EXPR, type, arg),
9360 build_real (type, r));
9361 /*result = fold_build2_loc (loc, UNGT_EXPR,
9362 TREE_TYPE (TREE_TYPE (fndecl)),
9363 fold_build1_loc (loc, ABS_EXPR, type, arg),
9364 build_real (type, r));
9365 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9366 TREE_TYPE (TREE_TYPE (fndecl)),
9370 case BUILT_IN_ISNORMAL:
9372 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9373 islessequal(fabs(x),DBL_MAX). */
9374 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9375 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9376 tree const type = TREE_TYPE (arg);
9377 REAL_VALUE_TYPE rmax, rmin;
9380 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9381 real_from_string (&rmax, buf);
9382 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9383 real_from_string (&rmin, buf);
9384 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9385 result = build_call_expr (isle_fn, 2, arg,
9386 build_real (type, rmax));
9387 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9388 build_call_expr (isge_fn, 2, arg,
9389 build_real (type, rmin)));
9399 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9400 ARG is the argument for the call. */
9403 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9405 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9408 if (!validate_arg (arg, REAL_TYPE))
9411 switch (builtin_index)
9413 case BUILT_IN_ISINF:
9414 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9415 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9417 if (TREE_CODE (arg) == REAL_CST)
9419 r = TREE_REAL_CST (arg);
9420 if (real_isinf (&r))
9421 return real_compare (GT_EXPR, &r, &dconst0)
9422 ? integer_one_node : integer_minus_one_node;
9424 return integer_zero_node;
9429 case BUILT_IN_ISINF_SIGN:
9431 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9432 /* In a boolean context, GCC will fold the inner COND_EXPR to
9433 1. So e.g. "if (isinf_sign(x))" would be folded to just
9434 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9435 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9436 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9437 tree tmp = NULL_TREE;
9439 arg = builtin_save_expr (arg);
9441 if (signbit_fn && isinf_fn)
9443 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9444 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9446 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9447 signbit_call, integer_zero_node);
9448 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9449 isinf_call, integer_zero_node);
9451 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9452 integer_minus_one_node, integer_one_node);
9453 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9461 case BUILT_IN_ISFINITE:
9462 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9463 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9464 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9466 if (TREE_CODE (arg) == REAL_CST)
9468 r = TREE_REAL_CST (arg);
9469 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9474 case BUILT_IN_ISNAN:
9475 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9476 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9478 if (TREE_CODE (arg) == REAL_CST)
9480 r = TREE_REAL_CST (arg);
9481 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9484 arg = builtin_save_expr (arg);
9485 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9492 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9493 This builtin will generate code to return the appropriate floating
9494 point classification depending on the value of the floating point
9495 number passed in. The possible return values must be supplied as
9496 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9497 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9498 one floating point argument which is "type generic". */
9501 fold_builtin_fpclassify (location_t loc, tree exp)
9503 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9504 arg, type, res, tmp;
9505 enum machine_mode mode;
9509 /* Verify the required arguments in the original call. */
9510 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9511 INTEGER_TYPE, INTEGER_TYPE,
9512 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9515 fp_nan = CALL_EXPR_ARG (exp, 0);
9516 fp_infinite = CALL_EXPR_ARG (exp, 1);
9517 fp_normal = CALL_EXPR_ARG (exp, 2);
9518 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9519 fp_zero = CALL_EXPR_ARG (exp, 4);
9520 arg = CALL_EXPR_ARG (exp, 5);
9521 type = TREE_TYPE (arg);
9522 mode = TYPE_MODE (type);
9523 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9527 (fabs(x) == Inf ? FP_INFINITE :
9528 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9529 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9531 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9532 build_real (type, dconst0));
9533 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9534 tmp, fp_zero, fp_subnormal);
9536 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9537 real_from_string (&r, buf);
9538 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9539 arg, build_real (type, r));
9540 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9542 if (HONOR_INFINITIES (mode))
9545 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9546 build_real (type, r));
9547 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9551 if (HONOR_NANS (mode))
9553 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9554 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9560 /* Fold a call to an unordered comparison function such as
9561 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9562 being called and ARG0 and ARG1 are the arguments for the call.
9563 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9564 the opposite of the desired result. UNORDERED_CODE is used
9565 for modes that can hold NaNs and ORDERED_CODE is used for
9569 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9570 enum tree_code unordered_code,
9571 enum tree_code ordered_code)
9573 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9574 enum tree_code code;
9576 enum tree_code code0, code1;
9577 tree cmp_type = NULL_TREE;
9579 type0 = TREE_TYPE (arg0);
9580 type1 = TREE_TYPE (arg1);
9582 code0 = TREE_CODE (type0);
9583 code1 = TREE_CODE (type1);
9585 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9586 /* Choose the wider of two real types. */
9587 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9589 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9591 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9594 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9595 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9597 if (unordered_code == UNORDERED_EXPR)
9599 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9600 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9601 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9604 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9606 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9607 fold_build2_loc (loc, code, type, arg0, arg1));
9610 /* Fold a call to built-in function FNDECL with 0 arguments.
9611 IGNORE is true if the result of the function call is ignored. This
9612 function returns NULL_TREE if no simplification was possible. */
9615 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9617 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9618 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9621 CASE_FLT_FN (BUILT_IN_INF):
9622 case BUILT_IN_INFD32:
9623 case BUILT_IN_INFD64:
9624 case BUILT_IN_INFD128:
9625 return fold_builtin_inf (loc, type, true);
9627 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9628 return fold_builtin_inf (loc, type, false);
9630 case BUILT_IN_CLASSIFY_TYPE:
9631 return fold_builtin_classify_type (NULL_TREE);
9639 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9640 IGNORE is true if the result of the function call is ignored. This
9641 function returns NULL_TREE if no simplification was possible. */
9644 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9646 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9647 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9651 case BUILT_IN_CONSTANT_P:
9653 tree val = fold_builtin_constant_p (arg0);
9655 /* Gimplification will pull the CALL_EXPR for the builtin out of
9656 an if condition. When not optimizing, we'll not CSE it back.
9657 To avoid link error types of regressions, return false now. */
9658 if (!val && !optimize)
9659 val = integer_zero_node;
9664 case BUILT_IN_CLASSIFY_TYPE:
9665 return fold_builtin_classify_type (arg0);
9667 case BUILT_IN_STRLEN:
9668 return fold_builtin_strlen (loc, type, arg0);
9670 CASE_FLT_FN (BUILT_IN_FABS):
9671 return fold_builtin_fabs (loc, arg0, type);
9675 case BUILT_IN_LLABS:
9676 case BUILT_IN_IMAXABS:
9677 return fold_builtin_abs (loc, arg0, type);
9679 CASE_FLT_FN (BUILT_IN_CONJ):
9680 if (validate_arg (arg0, COMPLEX_TYPE)
9681 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9682 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9685 CASE_FLT_FN (BUILT_IN_CREAL):
9686 if (validate_arg (arg0, COMPLEX_TYPE)
9687 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9688 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9691 CASE_FLT_FN (BUILT_IN_CIMAG):
9692 if (validate_arg (arg0, COMPLEX_TYPE)
9693 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9694 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9697 CASE_FLT_FN (BUILT_IN_CCOS):
9698 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9700 CASE_FLT_FN (BUILT_IN_CCOSH):
9701 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9703 CASE_FLT_FN (BUILT_IN_CSIN):
9704 if (validate_arg (arg0, COMPLEX_TYPE)
9705 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9706 return do_mpc_arg1 (arg0, type, mpc_sin);
9709 CASE_FLT_FN (BUILT_IN_CSINH):
9710 if (validate_arg (arg0, COMPLEX_TYPE)
9711 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9712 return do_mpc_arg1 (arg0, type, mpc_sinh);
9715 CASE_FLT_FN (BUILT_IN_CTAN):
9716 if (validate_arg (arg0, COMPLEX_TYPE)
9717 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9718 return do_mpc_arg1 (arg0, type, mpc_tan);
9721 CASE_FLT_FN (BUILT_IN_CTANH):
9722 if (validate_arg (arg0, COMPLEX_TYPE)
9723 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9724 return do_mpc_arg1 (arg0, type, mpc_tanh);
9727 CASE_FLT_FN (BUILT_IN_CLOG):
9728 if (validate_arg (arg0, COMPLEX_TYPE)
9729 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9730 return do_mpc_arg1 (arg0, type, mpc_log);
9733 CASE_FLT_FN (BUILT_IN_CSQRT):
9734 if (validate_arg (arg0, COMPLEX_TYPE)
9735 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9736 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9739 CASE_FLT_FN (BUILT_IN_CASIN):
9740 if (validate_arg (arg0, COMPLEX_TYPE)
9741 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9742 return do_mpc_arg1 (arg0, type, mpc_asin);
9745 CASE_FLT_FN (BUILT_IN_CACOS):
9746 if (validate_arg (arg0, COMPLEX_TYPE)
9747 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9748 return do_mpc_arg1 (arg0, type, mpc_acos);
9751 CASE_FLT_FN (BUILT_IN_CATAN):
9752 if (validate_arg (arg0, COMPLEX_TYPE)
9753 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9754 return do_mpc_arg1 (arg0, type, mpc_atan);
9757 CASE_FLT_FN (BUILT_IN_CASINH):
9758 if (validate_arg (arg0, COMPLEX_TYPE)
9759 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9760 return do_mpc_arg1 (arg0, type, mpc_asinh);
9763 CASE_FLT_FN (BUILT_IN_CACOSH):
9764 if (validate_arg (arg0, COMPLEX_TYPE)
9765 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9766 return do_mpc_arg1 (arg0, type, mpc_acosh);
9769 CASE_FLT_FN (BUILT_IN_CATANH):
9770 if (validate_arg (arg0, COMPLEX_TYPE)
9771 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9772 return do_mpc_arg1 (arg0, type, mpc_atanh);
9775 CASE_FLT_FN (BUILT_IN_CABS):
9776 return fold_builtin_cabs (loc, arg0, type, fndecl);
9778 CASE_FLT_FN (BUILT_IN_CARG):
9779 return fold_builtin_carg (loc, arg0, type);
9781 CASE_FLT_FN (BUILT_IN_SQRT):
9782 return fold_builtin_sqrt (loc, arg0, type);
9784 CASE_FLT_FN (BUILT_IN_CBRT):
9785 return fold_builtin_cbrt (loc, arg0, type);
9787 CASE_FLT_FN (BUILT_IN_ASIN):
9788 if (validate_arg (arg0, REAL_TYPE))
9789 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9790 &dconstm1, &dconst1, true);
9793 CASE_FLT_FN (BUILT_IN_ACOS):
9794 if (validate_arg (arg0, REAL_TYPE))
9795 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9796 &dconstm1, &dconst1, true);
9799 CASE_FLT_FN (BUILT_IN_ATAN):
9800 if (validate_arg (arg0, REAL_TYPE))
9801 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9804 CASE_FLT_FN (BUILT_IN_ASINH):
9805 if (validate_arg (arg0, REAL_TYPE))
9806 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9809 CASE_FLT_FN (BUILT_IN_ACOSH):
9810 if (validate_arg (arg0, REAL_TYPE))
9811 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9812 &dconst1, NULL, true);
9815 CASE_FLT_FN (BUILT_IN_ATANH):
9816 if (validate_arg (arg0, REAL_TYPE))
9817 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9818 &dconstm1, &dconst1, false);
9821 CASE_FLT_FN (BUILT_IN_SIN):
9822 if (validate_arg (arg0, REAL_TYPE))
9823 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9826 CASE_FLT_FN (BUILT_IN_COS):
9827 return fold_builtin_cos (loc, arg0, type, fndecl);
9829 CASE_FLT_FN (BUILT_IN_TAN):
9830 return fold_builtin_tan (arg0, type);
9832 CASE_FLT_FN (BUILT_IN_CEXP):
9833 return fold_builtin_cexp (loc, arg0, type);
9835 CASE_FLT_FN (BUILT_IN_CEXPI):
9836 if (validate_arg (arg0, REAL_TYPE))
9837 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9840 CASE_FLT_FN (BUILT_IN_SINH):
9841 if (validate_arg (arg0, REAL_TYPE))
9842 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9845 CASE_FLT_FN (BUILT_IN_COSH):
9846 return fold_builtin_cosh (loc, arg0, type, fndecl);
9848 CASE_FLT_FN (BUILT_IN_TANH):
9849 if (validate_arg (arg0, REAL_TYPE))
9850 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9853 CASE_FLT_FN (BUILT_IN_ERF):
9854 if (validate_arg (arg0, REAL_TYPE))
9855 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9858 CASE_FLT_FN (BUILT_IN_ERFC):
9859 if (validate_arg (arg0, REAL_TYPE))
9860 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9863 CASE_FLT_FN (BUILT_IN_TGAMMA):
9864 if (validate_arg (arg0, REAL_TYPE))
9865 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9868 CASE_FLT_FN (BUILT_IN_EXP):
9869 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9871 CASE_FLT_FN (BUILT_IN_EXP2):
9872 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9874 CASE_FLT_FN (BUILT_IN_EXP10):
9875 CASE_FLT_FN (BUILT_IN_POW10):
9876 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9878 CASE_FLT_FN (BUILT_IN_EXPM1):
9879 if (validate_arg (arg0, REAL_TYPE))
9880 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9883 CASE_FLT_FN (BUILT_IN_LOG):
9884 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9886 CASE_FLT_FN (BUILT_IN_LOG2):
9887 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9889 CASE_FLT_FN (BUILT_IN_LOG10):
9890 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9892 CASE_FLT_FN (BUILT_IN_LOG1P):
9893 if (validate_arg (arg0, REAL_TYPE))
9894 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9895 &dconstm1, NULL, false);
9898 CASE_FLT_FN (BUILT_IN_J0):
9899 if (validate_arg (arg0, REAL_TYPE))
9900 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9904 CASE_FLT_FN (BUILT_IN_J1):
9905 if (validate_arg (arg0, REAL_TYPE))
9906 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9910 CASE_FLT_FN (BUILT_IN_Y0):
9911 if (validate_arg (arg0, REAL_TYPE))
9912 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9913 &dconst0, NULL, false);
9916 CASE_FLT_FN (BUILT_IN_Y1):
9917 if (validate_arg (arg0, REAL_TYPE))
9918 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9919 &dconst0, NULL, false);
9922 CASE_FLT_FN (BUILT_IN_NAN):
9923 case BUILT_IN_NAND32:
9924 case BUILT_IN_NAND64:
9925 case BUILT_IN_NAND128:
9926 return fold_builtin_nan (arg0, type, true);
9928 CASE_FLT_FN (BUILT_IN_NANS):
9929 return fold_builtin_nan (arg0, type, false);
9931 CASE_FLT_FN (BUILT_IN_FLOOR):
9932 return fold_builtin_floor (loc, fndecl, arg0);
9934 CASE_FLT_FN (BUILT_IN_CEIL):
9935 return fold_builtin_ceil (loc, fndecl, arg0);
9937 CASE_FLT_FN (BUILT_IN_TRUNC):
9938 return fold_builtin_trunc (loc, fndecl, arg0);
9940 CASE_FLT_FN (BUILT_IN_ROUND):
9941 return fold_builtin_round (loc, fndecl, arg0);
9943 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9944 CASE_FLT_FN (BUILT_IN_RINT):
9945 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9947 CASE_FLT_FN (BUILT_IN_LCEIL):
9948 CASE_FLT_FN (BUILT_IN_LLCEIL):
9949 CASE_FLT_FN (BUILT_IN_LFLOOR):
9950 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9951 CASE_FLT_FN (BUILT_IN_LROUND):
9952 CASE_FLT_FN (BUILT_IN_LLROUND):
9953 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9955 CASE_FLT_FN (BUILT_IN_LRINT):
9956 CASE_FLT_FN (BUILT_IN_LLRINT):
9957 return fold_fixed_mathfn (loc, fndecl, arg0);
9959 case BUILT_IN_BSWAP32:
9960 case BUILT_IN_BSWAP64:
9961 return fold_builtin_bswap (fndecl, arg0);
9963 CASE_INT_FN (BUILT_IN_FFS):
9964 CASE_INT_FN (BUILT_IN_CLZ):
9965 CASE_INT_FN (BUILT_IN_CTZ):
9966 CASE_INT_FN (BUILT_IN_POPCOUNT):
9967 CASE_INT_FN (BUILT_IN_PARITY):
9968 return fold_builtin_bitop (fndecl, arg0);
9970 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9971 return fold_builtin_signbit (loc, arg0, type);
9973 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9974 return fold_builtin_significand (loc, arg0, type);
9976 CASE_FLT_FN (BUILT_IN_ILOGB):
9977 CASE_FLT_FN (BUILT_IN_LOGB):
9978 return fold_builtin_logb (loc, arg0, type);
9980 case BUILT_IN_ISASCII:
9981 return fold_builtin_isascii (loc, arg0);
9983 case BUILT_IN_TOASCII:
9984 return fold_builtin_toascii (loc, arg0);
9986 case BUILT_IN_ISDIGIT:
9987 return fold_builtin_isdigit (loc, arg0);
9989 CASE_FLT_FN (BUILT_IN_FINITE):
9990 case BUILT_IN_FINITED32:
9991 case BUILT_IN_FINITED64:
9992 case BUILT_IN_FINITED128:
9993 case BUILT_IN_ISFINITE:
9995 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9998 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10001 CASE_FLT_FN (BUILT_IN_ISINF):
10002 case BUILT_IN_ISINFD32:
10003 case BUILT_IN_ISINFD64:
10004 case BUILT_IN_ISINFD128:
10006 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10009 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10012 case BUILT_IN_ISNORMAL:
10013 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10015 case BUILT_IN_ISINF_SIGN:
10016 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10018 CASE_FLT_FN (BUILT_IN_ISNAN):
10019 case BUILT_IN_ISNAND32:
10020 case BUILT_IN_ISNAND64:
10021 case BUILT_IN_ISNAND128:
10022 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10024 case BUILT_IN_PRINTF:
10025 case BUILT_IN_PRINTF_UNLOCKED:
10026 case BUILT_IN_VPRINTF:
10027 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10037 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10038 IGNORE is true if the result of the function call is ignored. This
10039 function returns NULL_TREE if no simplification was possible. */
10042 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10044 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10045 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10049 CASE_FLT_FN (BUILT_IN_JN):
10050 if (validate_arg (arg0, INTEGER_TYPE)
10051 && validate_arg (arg1, REAL_TYPE))
10052 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10055 CASE_FLT_FN (BUILT_IN_YN):
10056 if (validate_arg (arg0, INTEGER_TYPE)
10057 && validate_arg (arg1, REAL_TYPE))
10058 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10062 CASE_FLT_FN (BUILT_IN_DREM):
10063 CASE_FLT_FN (BUILT_IN_REMAINDER):
10064 if (validate_arg (arg0, REAL_TYPE)
10065 && validate_arg(arg1, REAL_TYPE))
10066 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10069 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10070 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10071 if (validate_arg (arg0, REAL_TYPE)
10072 && validate_arg(arg1, POINTER_TYPE))
10073 return do_mpfr_lgamma_r (arg0, arg1, type);
10076 CASE_FLT_FN (BUILT_IN_ATAN2):
10077 if (validate_arg (arg0, REAL_TYPE)
10078 && validate_arg(arg1, REAL_TYPE))
10079 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10082 CASE_FLT_FN (BUILT_IN_FDIM):
10083 if (validate_arg (arg0, REAL_TYPE)
10084 && validate_arg(arg1, REAL_TYPE))
10085 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10088 CASE_FLT_FN (BUILT_IN_HYPOT):
10089 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10091 CASE_FLT_FN (BUILT_IN_CPOW):
10092 if (validate_arg (arg0, COMPLEX_TYPE)
10093 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10094 && validate_arg (arg1, COMPLEX_TYPE)
10095 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10096 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10099 CASE_FLT_FN (BUILT_IN_LDEXP):
10100 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10101 CASE_FLT_FN (BUILT_IN_SCALBN):
10102 CASE_FLT_FN (BUILT_IN_SCALBLN):
10103 return fold_builtin_load_exponent (loc, arg0, arg1,
10104 type, /*ldexp=*/false);
10106 CASE_FLT_FN (BUILT_IN_FREXP):
10107 return fold_builtin_frexp (loc, arg0, arg1, type);
10109 CASE_FLT_FN (BUILT_IN_MODF):
10110 return fold_builtin_modf (loc, arg0, arg1, type);
10112 case BUILT_IN_BZERO:
10113 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10115 case BUILT_IN_FPUTS:
10116 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10118 case BUILT_IN_FPUTS_UNLOCKED:
10119 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10121 case BUILT_IN_STRSTR:
10122 return fold_builtin_strstr (loc, arg0, arg1, type);
10124 case BUILT_IN_STRCAT:
10125 return fold_builtin_strcat (loc, arg0, arg1);
10127 case BUILT_IN_STRSPN:
10128 return fold_builtin_strspn (loc, arg0, arg1);
10130 case BUILT_IN_STRCSPN:
10131 return fold_builtin_strcspn (loc, arg0, arg1);
10133 case BUILT_IN_STRCHR:
10134 case BUILT_IN_INDEX:
10135 return fold_builtin_strchr (loc, arg0, arg1, type);
10137 case BUILT_IN_STRRCHR:
10138 case BUILT_IN_RINDEX:
10139 return fold_builtin_strrchr (loc, arg0, arg1, type);
10141 case BUILT_IN_STRCPY:
10142 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10144 case BUILT_IN_STPCPY:
10147 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10151 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10154 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10157 case BUILT_IN_STRCMP:
10158 return fold_builtin_strcmp (loc, arg0, arg1);
10160 case BUILT_IN_STRPBRK:
10161 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10163 case BUILT_IN_EXPECT:
10164 return fold_builtin_expect (loc, arg0, arg1);
10166 CASE_FLT_FN (BUILT_IN_POW):
10167 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10169 CASE_FLT_FN (BUILT_IN_POWI):
10170 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10172 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10173 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10175 CASE_FLT_FN (BUILT_IN_FMIN):
10176 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10178 CASE_FLT_FN (BUILT_IN_FMAX):
10179 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10181 case BUILT_IN_ISGREATER:
10182 return fold_builtin_unordered_cmp (loc, fndecl,
10183 arg0, arg1, UNLE_EXPR, LE_EXPR);
10184 case BUILT_IN_ISGREATEREQUAL:
10185 return fold_builtin_unordered_cmp (loc, fndecl,
10186 arg0, arg1, UNLT_EXPR, LT_EXPR);
10187 case BUILT_IN_ISLESS:
10188 return fold_builtin_unordered_cmp (loc, fndecl,
10189 arg0, arg1, UNGE_EXPR, GE_EXPR);
10190 case BUILT_IN_ISLESSEQUAL:
10191 return fold_builtin_unordered_cmp (loc, fndecl,
10192 arg0, arg1, UNGT_EXPR, GT_EXPR);
10193 case BUILT_IN_ISLESSGREATER:
10194 return fold_builtin_unordered_cmp (loc, fndecl,
10195 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10196 case BUILT_IN_ISUNORDERED:
10197 return fold_builtin_unordered_cmp (loc, fndecl,
10198 arg0, arg1, UNORDERED_EXPR,
10201 /* We do the folding for va_start in the expander. */
10202 case BUILT_IN_VA_START:
10205 case BUILT_IN_SPRINTF:
10206 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10208 case BUILT_IN_OBJECT_SIZE:
10209 return fold_builtin_object_size (arg0, arg1);
10211 case BUILT_IN_PRINTF:
10212 case BUILT_IN_PRINTF_UNLOCKED:
10213 case BUILT_IN_VPRINTF:
10214 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10216 case BUILT_IN_PRINTF_CHK:
10217 case BUILT_IN_VPRINTF_CHK:
10218 if (!validate_arg (arg0, INTEGER_TYPE)
10219 || TREE_SIDE_EFFECTS (arg0))
10222 return fold_builtin_printf (loc, fndecl,
10223 arg1, NULL_TREE, ignore, fcode);
10226 case BUILT_IN_FPRINTF:
10227 case BUILT_IN_FPRINTF_UNLOCKED:
10228 case BUILT_IN_VFPRINTF:
10229 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10238 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10239 and ARG2. IGNORE is true if the result of the function call is ignored.
10240 This function returns NULL_TREE if no simplification was possible. */
10243 fold_builtin_3 (location_t loc, tree fndecl,
10244 tree arg0, tree arg1, tree arg2, bool ignore)
10246 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10247 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10251 CASE_FLT_FN (BUILT_IN_SINCOS):
10252 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10254 CASE_FLT_FN (BUILT_IN_FMA):
10255 if (validate_arg (arg0, REAL_TYPE)
10256 && validate_arg(arg1, REAL_TYPE)
10257 && validate_arg(arg2, REAL_TYPE))
10258 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10261 CASE_FLT_FN (BUILT_IN_REMQUO):
10262 if (validate_arg (arg0, REAL_TYPE)
10263 && validate_arg(arg1, REAL_TYPE)
10264 && validate_arg(arg2, POINTER_TYPE))
10265 return do_mpfr_remquo (arg0, arg1, arg2);
10268 case BUILT_IN_MEMSET:
10269 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10271 case BUILT_IN_BCOPY:
10272 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10273 void_type_node, true, /*endp=*/3);
10275 case BUILT_IN_MEMCPY:
10276 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10277 type, ignore, /*endp=*/0);
10279 case BUILT_IN_MEMPCPY:
10280 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10281 type, ignore, /*endp=*/1);
10283 case BUILT_IN_MEMMOVE:
10284 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10285 type, ignore, /*endp=*/3);
10287 case BUILT_IN_STRNCAT:
10288 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10290 case BUILT_IN_STRNCPY:
10291 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10293 case BUILT_IN_STRNCMP:
10294 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10296 case BUILT_IN_MEMCHR:
10297 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10299 case BUILT_IN_BCMP:
10300 case BUILT_IN_MEMCMP:
10301 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10303 case BUILT_IN_SPRINTF:
10304 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10306 case BUILT_IN_STRCPY_CHK:
10307 case BUILT_IN_STPCPY_CHK:
10308 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10311 case BUILT_IN_STRCAT_CHK:
10312 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10314 case BUILT_IN_PRINTF_CHK:
10315 case BUILT_IN_VPRINTF_CHK:
10316 if (!validate_arg (arg0, INTEGER_TYPE)
10317 || TREE_SIDE_EFFECTS (arg0))
10320 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10323 case BUILT_IN_FPRINTF:
10324 case BUILT_IN_FPRINTF_UNLOCKED:
10325 case BUILT_IN_VFPRINTF:
10326 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10329 case BUILT_IN_FPRINTF_CHK:
10330 case BUILT_IN_VFPRINTF_CHK:
10331 if (!validate_arg (arg1, INTEGER_TYPE)
10332 || TREE_SIDE_EFFECTS (arg1))
10335 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10344 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10345 ARG2, and ARG3. IGNORE is true if the result of the function call is
10346 ignored. This function returns NULL_TREE if no simplification was
10350 fold_builtin_4 (location_t loc, tree fndecl,
10351 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10353 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10357 case BUILT_IN_MEMCPY_CHK:
10358 case BUILT_IN_MEMPCPY_CHK:
10359 case BUILT_IN_MEMMOVE_CHK:
10360 case BUILT_IN_MEMSET_CHK:
10361 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10363 DECL_FUNCTION_CODE (fndecl));
10365 case BUILT_IN_STRNCPY_CHK:
10366 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10368 case BUILT_IN_STRNCAT_CHK:
10369 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10371 case BUILT_IN_FPRINTF_CHK:
10372 case BUILT_IN_VFPRINTF_CHK:
10373 if (!validate_arg (arg1, INTEGER_TYPE)
10374 || TREE_SIDE_EFFECTS (arg1))
10377 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10387 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10388 arguments, where NARGS <= 4. IGNORE is true if the result of the
10389 function call is ignored. This function returns NULL_TREE if no
10390 simplification was possible. Note that this only folds builtins with
10391 fixed argument patterns. Foldings that do varargs-to-varargs
10392 transformations, or that match calls with more than 4 arguments,
10393 need to be handled with fold_builtin_varargs instead. */
10395 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10398 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10400 tree ret = NULL_TREE;
10405 ret = fold_builtin_0 (loc, fndecl, ignore);
10408 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10411 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10414 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10417 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10425 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10426 SET_EXPR_LOCATION (ret, loc);
10427 TREE_NO_WARNING (ret) = 1;
10433 /* Builtins with folding operations that operate on "..." arguments
10434 need special handling; we need to store the arguments in a convenient
10435 data structure before attempting any folding. Fortunately there are
10436 only a few builtins that fall into this category. FNDECL is the
10437 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10438 result of the function call is ignored. */
10441 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10442 bool ignore ATTRIBUTE_UNUSED)
10444 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10445 tree ret = NULL_TREE;
10449 case BUILT_IN_SPRINTF_CHK:
10450 case BUILT_IN_VSPRINTF_CHK:
10451 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10454 case BUILT_IN_SNPRINTF_CHK:
10455 case BUILT_IN_VSNPRINTF_CHK:
10456 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10459 case BUILT_IN_FPCLASSIFY:
10460 ret = fold_builtin_fpclassify (loc, exp);
10468 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10469 SET_EXPR_LOCATION (ret, loc);
10470 TREE_NO_WARNING (ret) = 1;
10476 /* Return true if FNDECL shouldn't be folded right now.
10477 If a built-in function has an inline attribute always_inline
10478 wrapper, defer folding it after always_inline functions have
10479 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10480 might not be performed. */
10483 avoid_folding_inline_builtin (tree fndecl)
10485 return (DECL_DECLARED_INLINE_P (fndecl)
10486 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10488 && !cfun->always_inline_functions_inlined
10489 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10492 /* A wrapper function for builtin folding that prevents warnings for
10493 "statement without effect" and the like, caused by removing the
10494 call node earlier than the warning is generated. */
10497 fold_call_expr (location_t loc, tree exp, bool ignore)
10499 tree ret = NULL_TREE;
10500 tree fndecl = get_callee_fndecl (exp);
10502 && TREE_CODE (fndecl) == FUNCTION_DECL
10503 && DECL_BUILT_IN (fndecl)
10504 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10505 yet. Defer folding until we see all the arguments
10506 (after inlining). */
10507 && !CALL_EXPR_VA_ARG_PACK (exp))
10509 int nargs = call_expr_nargs (exp);
10511 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10512 instead last argument is __builtin_va_arg_pack (). Defer folding
10513 even in that case, until arguments are finalized. */
10514 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10516 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10518 && TREE_CODE (fndecl2) == FUNCTION_DECL
10519 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10520 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10524 if (avoid_folding_inline_builtin (fndecl))
10527 /* FIXME: Don't use a list in this interface. */
10528 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10529 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10532 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10534 tree *args = CALL_EXPR_ARGP (exp);
10535 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10538 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10546 /* Conveniently construct a function call expression. FNDECL names the
10547 function to be called and ARGLIST is a TREE_LIST of arguments. */
10550 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10552 tree fntype = TREE_TYPE (fndecl);
10553 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10554 int n = list_length (arglist);
10555 tree *argarray = (tree *) alloca (n * sizeof (tree));
10558 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10559 argarray[i] = TREE_VALUE (arglist);
10560 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10563 /* Conveniently construct a function call expression. FNDECL names the
10564 function to be called, N is the number of arguments, and the "..."
10565 parameters are the argument expressions. */
10568 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10571 tree fntype = TREE_TYPE (fndecl);
10572 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10573 tree *argarray = (tree *) alloca (n * sizeof (tree));
10577 for (i = 0; i < n; i++)
10578 argarray[i] = va_arg (ap, tree);
10580 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10583 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10584 N arguments are passed in the array ARGARRAY. */
10587 fold_builtin_call_array (location_t loc, tree type,
10592 tree ret = NULL_TREE;
10596 if (TREE_CODE (fn) == ADDR_EXPR)
10598 tree fndecl = TREE_OPERAND (fn, 0);
10599 if (TREE_CODE (fndecl) == FUNCTION_DECL
10600 && DECL_BUILT_IN (fndecl))
10602 /* If last argument is __builtin_va_arg_pack (), arguments to this
10603 function are not finalized yet. Defer folding until they are. */
10604 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10606 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10608 && TREE_CODE (fndecl2) == FUNCTION_DECL
10609 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10610 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10611 return build_call_array_loc (loc, type, fn, n, argarray);
10613 if (avoid_folding_inline_builtin (fndecl))
10614 return build_call_array_loc (loc, type, fn, n, argarray);
10615 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10617 tree arglist = NULL_TREE;
10618 for (i = n - 1; i >= 0; i--)
10619 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10620 ret = targetm.fold_builtin (fndecl, arglist, false);
10623 return build_call_array_loc (loc, type, fn, n, argarray);
10625 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10627 /* First try the transformations that don't require consing up
10629 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10634 /* If we got this far, we need to build an exp. */
10635 exp = build_call_array_loc (loc, type, fn, n, argarray);
10636 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10637 return ret ? ret : exp;
10641 return build_call_array_loc (loc, type, fn, n, argarray);
10644 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10645 along with N new arguments specified as the "..." parameters. SKIP
10646 is the number of arguments in EXP to be omitted. This function is used
10647 to do varargs-to-varargs transformations. */
10650 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10652 int oldnargs = call_expr_nargs (exp);
10653 int nargs = oldnargs - skip + n;
10654 tree fntype = TREE_TYPE (fndecl);
10655 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10663 buffer = XALLOCAVEC (tree, nargs);
10665 for (i = 0; i < n; i++)
10666 buffer[i] = va_arg (ap, tree);
10668 for (j = skip; j < oldnargs; j++, i++)
10669 buffer[i] = CALL_EXPR_ARG (exp, j);
10672 buffer = CALL_EXPR_ARGP (exp) + skip;
10674 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
10677 /* Validate a single argument ARG against a tree code CODE representing
10681 validate_arg (const_tree arg, enum tree_code code)
10685 else if (code == POINTER_TYPE)
10686 return POINTER_TYPE_P (TREE_TYPE (arg));
10687 else if (code == INTEGER_TYPE)
10688 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10689 return code == TREE_CODE (TREE_TYPE (arg));
10692 /* This function validates the types of a function call argument list
10693 against a specified list of tree_codes. If the last specifier is a 0,
10694 that represents an ellipses, otherwise the last specifier must be a
10697 This is the GIMPLE version of validate_arglist. Eventually we want to
10698 completely convert builtins.c to work from GIMPLEs and the tree based
10699 validate_arglist will then be removed. */
10702 validate_gimple_arglist (const_gimple call, ...)
10704 enum tree_code code;
10710 va_start (ap, call);
10715 code = (enum tree_code) va_arg (ap, int);
10719 /* This signifies an ellipses, any further arguments are all ok. */
10723 /* This signifies an endlink, if no arguments remain, return
10724 true, otherwise return false. */
10725 res = (i == gimple_call_num_args (call));
10728 /* If no parameters remain or the parameter's code does not
10729 match the specified code, return false. Otherwise continue
10730 checking any remaining arguments. */
10731 arg = gimple_call_arg (call, i++);
10732 if (!validate_arg (arg, code))
10739 /* We need gotos here since we can only have one VA_CLOSE in a
10747 /* This function validates the types of a function call argument list
10748 against a specified list of tree_codes. If the last specifier is a 0,
10749 that represents an ellipses, otherwise the last specifier must be a
10753 validate_arglist (const_tree callexpr, ...)
10755 enum tree_code code;
10758 const_call_expr_arg_iterator iter;
10761 va_start (ap, callexpr);
10762 init_const_call_expr_arg_iterator (callexpr, &iter);
10766 code = (enum tree_code) va_arg (ap, int);
10770 /* This signifies an ellipses, any further arguments are all ok. */
10774 /* This signifies an endlink, if no arguments remain, return
10775 true, otherwise return false. */
10776 res = !more_const_call_expr_args_p (&iter);
10779 /* If no parameters remain or the parameter's code does not
10780 match the specified code, return false. Otherwise continue
10781 checking any remaining arguments. */
10782 arg = next_const_call_expr_arg (&iter);
10783 if (!validate_arg (arg, code))
10790 /* We need gotos here since we can only have one VA_CLOSE in a
10798 /* Default target-specific builtin expander that does nothing. */
10801 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10802 rtx target ATTRIBUTE_UNUSED,
10803 rtx subtarget ATTRIBUTE_UNUSED,
10804 enum machine_mode mode ATTRIBUTE_UNUSED,
10805 int ignore ATTRIBUTE_UNUSED)
10810 /* Returns true is EXP represents data that would potentially reside
10811 in a readonly section. */
10814 readonly_data_expr (tree exp)
10818 if (TREE_CODE (exp) != ADDR_EXPR)
10821 exp = get_base_address (TREE_OPERAND (exp, 0));
10825 /* Make sure we call decl_readonly_section only for trees it
10826 can handle (since it returns true for everything it doesn't
10828 if (TREE_CODE (exp) == STRING_CST
10829 || TREE_CODE (exp) == CONSTRUCTOR
10830 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10831 return decl_readonly_section (exp, 0);
10836 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10837 to the call, and TYPE is its return type.
10839 Return NULL_TREE if no simplification was possible, otherwise return the
10840 simplified form of the call as a tree.
10842 The simplified form may be a constant or other expression which
10843 computes the same value, but in a more efficient manner (including
10844 calls to other builtin functions).
10846 The call may contain arguments which need to be evaluated, but
10847 which are not useful to determine the result of the call. In
10848 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10849 COMPOUND_EXPR will be an argument which must be evaluated.
10850 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10851 COMPOUND_EXPR in the chain will contain the tree for the simplified
10852 form of the builtin function call. */
10855 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10857 if (!validate_arg (s1, POINTER_TYPE)
10858 || !validate_arg (s2, POINTER_TYPE))
10863 const char *p1, *p2;
10865 p2 = c_getstr (s2);
10869 p1 = c_getstr (s1);
10872 const char *r = strstr (p1, p2);
10876 return build_int_cst (TREE_TYPE (s1), 0);
10878 /* Return an offset into the constant string argument. */
10879 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10880 s1, size_int (r - p1));
10881 return fold_convert_loc (loc, type, tem);
10884 /* The argument is const char *, and the result is char *, so we need
10885 a type conversion here to avoid a warning. */
10887 return fold_convert_loc (loc, type, s1);
10892 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10896 /* New argument list transforming strstr(s1, s2) to
10897 strchr(s1, s2[0]). */
10898 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10902 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10903 the call, and TYPE is its return type.
10905 Return NULL_TREE if no simplification was possible, otherwise return the
10906 simplified form of the call as a tree.
10908 The simplified form may be a constant or other expression which
10909 computes the same value, but in a more efficient manner (including
10910 calls to other builtin functions).
10912 The call may contain arguments which need to be evaluated, but
10913 which are not useful to determine the result of the call. In
10914 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10915 COMPOUND_EXPR will be an argument which must be evaluated.
10916 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10917 COMPOUND_EXPR in the chain will contain the tree for the simplified
10918 form of the builtin function call. */
10921 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10923 if (!validate_arg (s1, POINTER_TYPE)
10924 || !validate_arg (s2, INTEGER_TYPE))
10930 if (TREE_CODE (s2) != INTEGER_CST)
10933 p1 = c_getstr (s1);
10940 if (target_char_cast (s2, &c))
10943 r = strchr (p1, c);
10946 return build_int_cst (TREE_TYPE (s1), 0);
10948 /* Return an offset into the constant string argument. */
10949 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10950 s1, size_int (r - p1));
10951 return fold_convert_loc (loc, type, tem);
10957 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10958 the call, and TYPE is its return type.
10960 Return NULL_TREE if no simplification was possible, otherwise return the
10961 simplified form of the call as a tree.
10963 The simplified form may be a constant or other expression which
10964 computes the same value, but in a more efficient manner (including
10965 calls to other builtin functions).
10967 The call may contain arguments which need to be evaluated, but
10968 which are not useful to determine the result of the call. In
10969 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10970 COMPOUND_EXPR will be an argument which must be evaluated.
10971 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10972 COMPOUND_EXPR in the chain will contain the tree for the simplified
10973 form of the builtin function call. */
10976 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10978 if (!validate_arg (s1, POINTER_TYPE)
10979 || !validate_arg (s2, INTEGER_TYPE))
10986 if (TREE_CODE (s2) != INTEGER_CST)
10989 p1 = c_getstr (s1);
10996 if (target_char_cast (s2, &c))
10999 r = strrchr (p1, c);
11002 return build_int_cst (TREE_TYPE (s1), 0);
11004 /* Return an offset into the constant string argument. */
11005 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11006 s1, size_int (r - p1));
11007 return fold_convert_loc (loc, type, tem);
11010 if (! integer_zerop (s2))
11013 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11017 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11018 return build_call_expr_loc (loc, fn, 2, s1, s2);
11022 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11023 to the call, and TYPE is its return type.
11025 Return NULL_TREE if no simplification was possible, otherwise return the
11026 simplified form of the call as a tree.
11028 The simplified form may be a constant or other expression which
11029 computes the same value, but in a more efficient manner (including
11030 calls to other builtin functions).
11032 The call may contain arguments which need to be evaluated, but
11033 which are not useful to determine the result of the call. In
11034 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11035 COMPOUND_EXPR will be an argument which must be evaluated.
11036 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11037 COMPOUND_EXPR in the chain will contain the tree for the simplified
11038 form of the builtin function call. */
11041 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11043 if (!validate_arg (s1, POINTER_TYPE)
11044 || !validate_arg (s2, POINTER_TYPE))
11049 const char *p1, *p2;
11051 p2 = c_getstr (s2);
11055 p1 = c_getstr (s1);
11058 const char *r = strpbrk (p1, p2);
11062 return build_int_cst (TREE_TYPE (s1), 0);
11064 /* Return an offset into the constant string argument. */
11065 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11066 s1, size_int (r - p1));
11067 return fold_convert_loc (loc, type, tem);
11071 /* strpbrk(x, "") == NULL.
11072 Evaluate and ignore s1 in case it had side-effects. */
11073 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11076 return NULL_TREE; /* Really call strpbrk. */
11078 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11082 /* New argument list transforming strpbrk(s1, s2) to
11083 strchr(s1, s2[0]). */
11084 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11088 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11091 Return NULL_TREE if no simplification was possible, otherwise return the
11092 simplified form of the call as a tree.
11094 The simplified form may be a constant or other expression which
11095 computes the same value, but in a more efficient manner (including
11096 calls to other builtin functions).
11098 The call may contain arguments which need to be evaluated, but
11099 which are not useful to determine the result of the call. In
11100 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11101 COMPOUND_EXPR will be an argument which must be evaluated.
11102 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11103 COMPOUND_EXPR in the chain will contain the tree for the simplified
11104 form of the builtin function call. */
11107 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11109 if (!validate_arg (dst, POINTER_TYPE)
11110 || !validate_arg (src, POINTER_TYPE))
11114 const char *p = c_getstr (src);
11116 /* If the string length is zero, return the dst parameter. */
11117 if (p && *p == '\0')
11120 if (optimize_insn_for_speed_p ())
11122 /* See if we can store by pieces into (dst + strlen(dst)). */
11124 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11125 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11127 if (!strlen_fn || !strcpy_fn)
11130 /* If we don't have a movstr we don't want to emit an strcpy
11131 call. We have to do that if the length of the source string
11132 isn't computable (in that case we can use memcpy probably
11133 later expanding to a sequence of mov instructions). If we
11134 have movstr instructions we can emit strcpy calls. */
11137 tree len = c_strlen (src, 1);
11138 if (! len || TREE_SIDE_EFFECTS (len))
11142 /* Stabilize the argument list. */
11143 dst = builtin_save_expr (dst);
11145 /* Create strlen (dst). */
11146 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11147 /* Create (dst p+ strlen (dst)). */
11149 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11150 TREE_TYPE (dst), dst, newdst);
11151 newdst = builtin_save_expr (newdst);
11153 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11154 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11160 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11161 arguments to the call.
11163 Return NULL_TREE if no simplification was possible, otherwise return the
11164 simplified form of the call as a tree.
11166 The simplified form may be a constant or other expression which
11167 computes the same value, but in a more efficient manner (including
11168 calls to other builtin functions).
11170 The call may contain arguments which need to be evaluated, but
11171 which are not useful to determine the result of the call. In
11172 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11173 COMPOUND_EXPR will be an argument which must be evaluated.
11174 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11175 COMPOUND_EXPR in the chain will contain the tree for the simplified
11176 form of the builtin function call. */
11179 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11181 if (!validate_arg (dst, POINTER_TYPE)
11182 || !validate_arg (src, POINTER_TYPE)
11183 || !validate_arg (len, INTEGER_TYPE))
11187 const char *p = c_getstr (src);
11189 /* If the requested length is zero, or the src parameter string
11190 length is zero, return the dst parameter. */
11191 if (integer_zerop (len) || (p && *p == '\0'))
11192 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11194 /* If the requested len is greater than or equal to the string
11195 length, call strcat. */
11196 if (TREE_CODE (len) == INTEGER_CST && p
11197 && compare_tree_int (len, strlen (p)) >= 0)
11199 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11201 /* If the replacement _DECL isn't initialized, don't do the
11206 return build_call_expr_loc (loc, fn, 2, dst, src);
11212 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11215 Return NULL_TREE if no simplification was possible, otherwise return the
11216 simplified form of the call as a tree.
11218 The simplified form may be a constant or other expression which
11219 computes the same value, but in a more efficient manner (including
11220 calls to other builtin functions).
11222 The call may contain arguments which need to be evaluated, but
11223 which are not useful to determine the result of the call. In
11224 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11225 COMPOUND_EXPR will be an argument which must be evaluated.
11226 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11227 COMPOUND_EXPR in the chain will contain the tree for the simplified
11228 form of the builtin function call. */
11231 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11233 if (!validate_arg (s1, POINTER_TYPE)
11234 || !validate_arg (s2, POINTER_TYPE))
11238 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11240 /* If both arguments are constants, evaluate at compile-time. */
11243 const size_t r = strspn (p1, p2);
11244 return size_int (r);
11247 /* If either argument is "", return NULL_TREE. */
11248 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11249 /* Evaluate and ignore both arguments in case either one has
11251 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11257 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11260 Return NULL_TREE if no simplification was possible, otherwise return the
11261 simplified form of the call as a tree.
11263 The simplified form may be a constant or other expression which
11264 computes the same value, but in a more efficient manner (including
11265 calls to other builtin functions).
11267 The call may contain arguments which need to be evaluated, but
11268 which are not useful to determine the result of the call. In
11269 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11270 COMPOUND_EXPR will be an argument which must be evaluated.
11271 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11272 COMPOUND_EXPR in the chain will contain the tree for the simplified
11273 form of the builtin function call. */
11276 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11278 if (!validate_arg (s1, POINTER_TYPE)
11279 || !validate_arg (s2, POINTER_TYPE))
11283 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11285 /* If both arguments are constants, evaluate at compile-time. */
11288 const size_t r = strcspn (p1, p2);
11289 return size_int (r);
11292 /* If the first argument is "", return NULL_TREE. */
11293 if (p1 && *p1 == '\0')
11295 /* Evaluate and ignore argument s2 in case it has
11297 return omit_one_operand_loc (loc, size_type_node,
11298 size_zero_node, s2);
11301 /* If the second argument is "", return __builtin_strlen(s1). */
11302 if (p2 && *p2 == '\0')
11304 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11306 /* If the replacement _DECL isn't initialized, don't do the
11311 return build_call_expr_loc (loc, fn, 1, s1);
11317 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11318 to the call. IGNORE is true if the value returned
11319 by the builtin will be ignored. UNLOCKED is true is true if this
11320 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11321 the known length of the string. Return NULL_TREE if no simplification
11325 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11326 bool ignore, bool unlocked, tree len)
11328 /* If we're using an unlocked function, assume the other unlocked
11329 functions exist explicitly. */
11330 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11331 : implicit_built_in_decls[BUILT_IN_FPUTC];
11332 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11333 : implicit_built_in_decls[BUILT_IN_FWRITE];
11335 /* If the return value is used, don't do the transformation. */
11339 /* Verify the arguments in the original call. */
11340 if (!validate_arg (arg0, POINTER_TYPE)
11341 || !validate_arg (arg1, POINTER_TYPE))
11345 len = c_strlen (arg0, 0);
11347 /* Get the length of the string passed to fputs. If the length
11348 can't be determined, punt. */
11350 || TREE_CODE (len) != INTEGER_CST)
11353 switch (compare_tree_int (len, 1))
11355 case -1: /* length is 0, delete the call entirely . */
11356 return omit_one_operand_loc (loc, integer_type_node,
11357 integer_zero_node, arg1);;
11359 case 0: /* length is 1, call fputc. */
11361 const char *p = c_getstr (arg0);
11366 return build_call_expr_loc (loc, fn_fputc, 2,
11367 build_int_cst (NULL_TREE, p[0]), arg1);
11373 case 1: /* length is greater than 1, call fwrite. */
11375 /* If optimizing for size keep fputs. */
11376 if (optimize_function_for_size_p (cfun))
11378 /* New argument list transforming fputs(string, stream) to
11379 fwrite(string, 1, len, stream). */
11381 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11382 size_one_node, len, arg1);
11387 gcc_unreachable ();
11392 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11393 produced. False otherwise. This is done so that we don't output the error
11394 or warning twice or three times. */
11397 fold_builtin_next_arg (tree exp, bool va_start_p)
11399 tree fntype = TREE_TYPE (current_function_decl);
11400 int nargs = call_expr_nargs (exp);
11403 if (TYPE_ARG_TYPES (fntype) == 0
11404 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11405 == void_type_node))
11407 error ("%<va_start%> used in function with fixed args");
11413 if (va_start_p && (nargs != 2))
11415 error ("wrong number of arguments to function %<va_start%>");
11418 arg = CALL_EXPR_ARG (exp, 1);
11420 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11421 when we checked the arguments and if needed issued a warning. */
11426 /* Evidently an out of date version of <stdarg.h>; can't validate
11427 va_start's second argument, but can still work as intended. */
11428 warning (0, "%<__builtin_next_arg%> called without an argument");
11431 else if (nargs > 1)
11433 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11436 arg = CALL_EXPR_ARG (exp, 0);
11439 if (TREE_CODE (arg) == SSA_NAME)
11440 arg = SSA_NAME_VAR (arg);
11442 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11443 or __builtin_next_arg (0) the first time we see it, after checking
11444 the arguments and if needed issuing a warning. */
11445 if (!integer_zerop (arg))
11447 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11449 /* Strip off all nops for the sake of the comparison. This
11450 is not quite the same as STRIP_NOPS. It does more.
11451 We must also strip off INDIRECT_EXPR for C++ reference
11453 while (CONVERT_EXPR_P (arg)
11454 || TREE_CODE (arg) == INDIRECT_REF)
11455 arg = TREE_OPERAND (arg, 0);
11456 if (arg != last_parm)
11458 /* FIXME: Sometimes with the tree optimizers we can get the
11459 not the last argument even though the user used the last
11460 argument. We just warn and set the arg to be the last
11461 argument so that we will get wrong-code because of
11463 warning (0, "second parameter of %<va_start%> not last named argument");
11466 /* Undefined by C99 7.15.1.4p4 (va_start):
11467 "If the parameter parmN is declared with the register storage
11468 class, with a function or array type, or with a type that is
11469 not compatible with the type that results after application of
11470 the default argument promotions, the behavior is undefined."
11472 else if (DECL_REGISTER (arg))
11473 warning (0, "undefined behaviour when second parameter of "
11474 "%<va_start%> is declared with %<register%> storage");
11476 /* We want to verify the second parameter just once before the tree
11477 optimizers are run and then avoid keeping it in the tree,
11478 as otherwise we could warn even for correct code like:
11479 void foo (int i, ...)
11480 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11482 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11484 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11490 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11491 ORIG may be null if this is a 2-argument call. We don't attempt to
11492 simplify calls with more than 3 arguments.
11494 Return NULL_TREE if no simplification was possible, otherwise return the
11495 simplified form of the call as a tree. If IGNORED is true, it means that
11496 the caller does not use the returned value of the function. */
11499 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11500 tree orig, int ignored)
11503 const char *fmt_str = NULL;
11505 /* Verify the required arguments in the original call. We deal with two
11506 types of sprintf() calls: 'sprintf (str, fmt)' and
11507 'sprintf (dest, "%s", orig)'. */
11508 if (!validate_arg (dest, POINTER_TYPE)
11509 || !validate_arg (fmt, POINTER_TYPE))
11511 if (orig && !validate_arg (orig, POINTER_TYPE))
11514 /* Check whether the format is a literal string constant. */
11515 fmt_str = c_getstr (fmt);
11516 if (fmt_str == NULL)
11520 retval = NULL_TREE;
11522 if (!init_target_chars ())
11525 /* If the format doesn't contain % args or %%, use strcpy. */
11526 if (strchr (fmt_str, target_percent) == NULL)
11528 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11533 /* Don't optimize sprintf (buf, "abc", ptr++). */
11537 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11538 'format' is known to contain no % formats. */
11539 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11541 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11544 /* If the format is "%s", use strcpy if the result isn't used. */
11545 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11548 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11553 /* Don't crash on sprintf (str1, "%s"). */
11557 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11560 retval = c_strlen (orig, 1);
11561 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11564 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11567 if (call && retval)
11569 retval = fold_convert_loc
11570 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11572 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11578 /* Expand a call EXP to __builtin_object_size. */
11581 expand_builtin_object_size (tree exp)
11584 int object_size_type;
11585 tree fndecl = get_callee_fndecl (exp);
11587 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11589 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11591 expand_builtin_trap ();
11595 ost = CALL_EXPR_ARG (exp, 1);
11598 if (TREE_CODE (ost) != INTEGER_CST
11599 || tree_int_cst_sgn (ost) < 0
11600 || compare_tree_int (ost, 3) > 0)
11602 error ("%Klast argument of %D is not integer constant between 0 and 3",
11604 expand_builtin_trap ();
11608 object_size_type = tree_low_cst (ost, 0);
11610 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11613 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11614 FCODE is the BUILT_IN_* to use.
11615 Return NULL_RTX if we failed; the caller should emit a normal call,
11616 otherwise try to get the result in TARGET, if convenient (and in
11617 mode MODE if that's convenient). */
11620 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11621 enum built_in_function fcode)
11623 tree dest, src, len, size;
11625 if (!validate_arglist (exp,
11627 fcode == BUILT_IN_MEMSET_CHK
11628 ? INTEGER_TYPE : POINTER_TYPE,
11629 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11632 dest = CALL_EXPR_ARG (exp, 0);
11633 src = CALL_EXPR_ARG (exp, 1);
11634 len = CALL_EXPR_ARG (exp, 2);
11635 size = CALL_EXPR_ARG (exp, 3);
11637 if (! host_integerp (size, 1))
11640 if (host_integerp (len, 1) || integer_all_onesp (size))
11644 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11646 warning_at (tree_nonartificial_location (exp),
11647 0, "%Kcall to %D will always overflow destination buffer",
11648 exp, get_callee_fndecl (exp));
11653 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11654 mem{cpy,pcpy,move,set} is available. */
11657 case BUILT_IN_MEMCPY_CHK:
11658 fn = built_in_decls[BUILT_IN_MEMCPY];
11660 case BUILT_IN_MEMPCPY_CHK:
11661 fn = built_in_decls[BUILT_IN_MEMPCPY];
11663 case BUILT_IN_MEMMOVE_CHK:
11664 fn = built_in_decls[BUILT_IN_MEMMOVE];
11666 case BUILT_IN_MEMSET_CHK:
11667 fn = built_in_decls[BUILT_IN_MEMSET];
11676 fn = build_call_nofold (fn, 3, dest, src, len);
11677 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11678 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11679 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11681 else if (fcode == BUILT_IN_MEMSET_CHK)
11685 unsigned int dest_align
11686 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11688 /* If DEST is not a pointer type, call the normal function. */
11689 if (dest_align == 0)
11692 /* If SRC and DEST are the same (and not volatile), do nothing. */
11693 if (operand_equal_p (src, dest, 0))
11697 if (fcode != BUILT_IN_MEMPCPY_CHK)
11699 /* Evaluate and ignore LEN in case it has side-effects. */
11700 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11701 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11704 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11705 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11708 /* __memmove_chk special case. */
11709 if (fcode == BUILT_IN_MEMMOVE_CHK)
11711 unsigned int src_align
11712 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11714 if (src_align == 0)
11717 /* If src is categorized for a readonly section we can use
11718 normal __memcpy_chk. */
11719 if (readonly_data_expr (src))
11721 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11724 fn = build_call_nofold (fn, 4, dest, src, len, size);
11725 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11726 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11727 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11734 /* Emit warning if a buffer overflow is detected at compile time. */
11737 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11741 location_t loc = tree_nonartificial_location (exp);
11745 case BUILT_IN_STRCPY_CHK:
11746 case BUILT_IN_STPCPY_CHK:
11747 /* For __strcat_chk the warning will be emitted only if overflowing
11748 by at least strlen (dest) + 1 bytes. */
11749 case BUILT_IN_STRCAT_CHK:
11750 len = CALL_EXPR_ARG (exp, 1);
11751 size = CALL_EXPR_ARG (exp, 2);
11754 case BUILT_IN_STRNCAT_CHK:
11755 case BUILT_IN_STRNCPY_CHK:
11756 len = CALL_EXPR_ARG (exp, 2);
11757 size = CALL_EXPR_ARG (exp, 3);
11759 case BUILT_IN_SNPRINTF_CHK:
11760 case BUILT_IN_VSNPRINTF_CHK:
11761 len = CALL_EXPR_ARG (exp, 1);
11762 size = CALL_EXPR_ARG (exp, 3);
11765 gcc_unreachable ();
11771 if (! host_integerp (size, 1) || integer_all_onesp (size))
11776 len = c_strlen (len, 1);
11777 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11780 else if (fcode == BUILT_IN_STRNCAT_CHK)
11782 tree src = CALL_EXPR_ARG (exp, 1);
11783 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11785 src = c_strlen (src, 1);
11786 if (! src || ! host_integerp (src, 1))
11788 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11789 exp, get_callee_fndecl (exp));
11792 else if (tree_int_cst_lt (src, size))
11795 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11798 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11799 exp, get_callee_fndecl (exp));
11802 /* Emit warning if a buffer overflow is detected at compile time
11803 in __sprintf_chk/__vsprintf_chk calls. */
11806 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11808 tree size, len, fmt;
11809 const char *fmt_str;
11810 int nargs = call_expr_nargs (exp);
11812 /* Verify the required arguments in the original call. */
11816 size = CALL_EXPR_ARG (exp, 2);
11817 fmt = CALL_EXPR_ARG (exp, 3);
11819 if (! host_integerp (size, 1) || integer_all_onesp (size))
11822 /* Check whether the format is a literal string constant. */
11823 fmt_str = c_getstr (fmt);
11824 if (fmt_str == NULL)
11827 if (!init_target_chars ())
11830 /* If the format doesn't contain % args or %%, we know its size. */
11831 if (strchr (fmt_str, target_percent) == 0)
11832 len = build_int_cstu (size_type_node, strlen (fmt_str));
11833 /* If the format is "%s" and first ... argument is a string literal,
11835 else if (fcode == BUILT_IN_SPRINTF_CHK
11836 && strcmp (fmt_str, target_percent_s) == 0)
11842 arg = CALL_EXPR_ARG (exp, 4);
11843 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11846 len = c_strlen (arg, 1);
11847 if (!len || ! host_integerp (len, 1))
11853 if (! tree_int_cst_lt (len, size))
11854 warning_at (tree_nonartificial_location (exp),
11855 0, "%Kcall to %D will always overflow destination buffer",
11856 exp, get_callee_fndecl (exp));
11859 /* Emit warning if a free is called with address of a variable. */
11862 maybe_emit_free_warning (tree exp)
11864 tree arg = CALL_EXPR_ARG (exp, 0);
11867 if (TREE_CODE (arg) != ADDR_EXPR)
11870 arg = get_base_address (TREE_OPERAND (arg, 0));
11871 if (arg == NULL || INDIRECT_REF_P (arg))
11874 if (SSA_VAR_P (arg))
11875 warning_at (tree_nonartificial_location (exp),
11876 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11878 warning_at (tree_nonartificial_location (exp),
11879 0, "%Kattempt to free a non-heap object", exp);
11882 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11886 fold_builtin_object_size (tree ptr, tree ost)
11888 tree ret = NULL_TREE;
11889 int object_size_type;
11891 if (!validate_arg (ptr, POINTER_TYPE)
11892 || !validate_arg (ost, INTEGER_TYPE))
11897 if (TREE_CODE (ost) != INTEGER_CST
11898 || tree_int_cst_sgn (ost) < 0
11899 || compare_tree_int (ost, 3) > 0)
11902 object_size_type = tree_low_cst (ost, 0);
11904 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11905 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11906 and (size_t) 0 for types 2 and 3. */
11907 if (TREE_SIDE_EFFECTS (ptr))
11908 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11910 if (TREE_CODE (ptr) == ADDR_EXPR)
11911 ret = build_int_cstu (size_type_node,
11912 compute_builtin_object_size (ptr, object_size_type));
11914 else if (TREE_CODE (ptr) == SSA_NAME)
11916 unsigned HOST_WIDE_INT bytes;
11918 /* If object size is not known yet, delay folding until
11919 later. Maybe subsequent passes will help determining
11921 bytes = compute_builtin_object_size (ptr, object_size_type);
11922 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11924 ret = build_int_cstu (size_type_node, bytes);
11929 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11930 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11931 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11938 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11939 DEST, SRC, LEN, and SIZE are the arguments to the call.
11940 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11941 code of the builtin. If MAXLEN is not NULL, it is maximum length
11942 passed as third argument. */
11945 fold_builtin_memory_chk (location_t loc, tree fndecl,
11946 tree dest, tree src, tree len, tree size,
11947 tree maxlen, bool ignore,
11948 enum built_in_function fcode)
11952 if (!validate_arg (dest, POINTER_TYPE)
11953 || !validate_arg (src,
11954 (fcode == BUILT_IN_MEMSET_CHK
11955 ? INTEGER_TYPE : POINTER_TYPE))
11956 || !validate_arg (len, INTEGER_TYPE)
11957 || !validate_arg (size, INTEGER_TYPE))
11960 /* If SRC and DEST are the same (and not volatile), return DEST
11961 (resp. DEST+LEN for __mempcpy_chk). */
11962 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11964 if (fcode != BUILT_IN_MEMPCPY_CHK)
11965 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
11969 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
11971 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
11975 if (! host_integerp (size, 1))
11978 if (! integer_all_onesp (size))
11980 if (! host_integerp (len, 1))
11982 /* If LEN is not constant, try MAXLEN too.
11983 For MAXLEN only allow optimizing into non-_ocs function
11984 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11985 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11987 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11989 /* (void) __mempcpy_chk () can be optimized into
11990 (void) __memcpy_chk (). */
11991 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11995 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12003 if (tree_int_cst_lt (size, maxlen))
12008 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12009 mem{cpy,pcpy,move,set} is available. */
12012 case BUILT_IN_MEMCPY_CHK:
12013 fn = built_in_decls[BUILT_IN_MEMCPY];
12015 case BUILT_IN_MEMPCPY_CHK:
12016 fn = built_in_decls[BUILT_IN_MEMPCPY];
12018 case BUILT_IN_MEMMOVE_CHK:
12019 fn = built_in_decls[BUILT_IN_MEMMOVE];
12021 case BUILT_IN_MEMSET_CHK:
12022 fn = built_in_decls[BUILT_IN_MEMSET];
12031 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12034 /* Fold a call to the __st[rp]cpy_chk builtin.
12035 DEST, SRC, and SIZE are the arguments to the call.
12036 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12037 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12038 strings passed as second argument. */
12041 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12042 tree src, tree size,
12043 tree maxlen, bool ignore,
12044 enum built_in_function fcode)
12048 if (!validate_arg (dest, POINTER_TYPE)
12049 || !validate_arg (src, POINTER_TYPE)
12050 || !validate_arg (size, INTEGER_TYPE))
12053 /* If SRC and DEST are the same (and not volatile), return DEST. */
12054 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12055 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12057 if (! host_integerp (size, 1))
12060 if (! integer_all_onesp (size))
12062 len = c_strlen (src, 1);
12063 if (! len || ! host_integerp (len, 1))
12065 /* If LEN is not constant, try MAXLEN too.
12066 For MAXLEN only allow optimizing into non-_ocs function
12067 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12068 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12070 if (fcode == BUILT_IN_STPCPY_CHK)
12075 /* If return value of __stpcpy_chk is ignored,
12076 optimize into __strcpy_chk. */
12077 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12081 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12084 if (! len || TREE_SIDE_EFFECTS (len))
12087 /* If c_strlen returned something, but not a constant,
12088 transform __strcpy_chk into __memcpy_chk. */
12089 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12093 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12094 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12095 build_call_expr_loc (loc, fn, 4,
12096 dest, src, len, size));
12102 if (! tree_int_cst_lt (maxlen, size))
12106 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12107 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12108 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12112 return build_call_expr_loc (loc, fn, 2, dest, src);
12115 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12116 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12117 length passed as third argument. */
12120 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12121 tree len, tree size, tree maxlen)
12125 if (!validate_arg (dest, POINTER_TYPE)
12126 || !validate_arg (src, POINTER_TYPE)
12127 || !validate_arg (len, INTEGER_TYPE)
12128 || !validate_arg (size, INTEGER_TYPE))
12131 if (! host_integerp (size, 1))
12134 if (! integer_all_onesp (size))
12136 if (! host_integerp (len, 1))
12138 /* If LEN is not constant, try MAXLEN too.
12139 For MAXLEN only allow optimizing into non-_ocs function
12140 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12141 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12147 if (tree_int_cst_lt (size, maxlen))
12151 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12152 fn = built_in_decls[BUILT_IN_STRNCPY];
12156 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12159 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12160 are the arguments to the call. */
12163 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12164 tree src, tree size)
12169 if (!validate_arg (dest, POINTER_TYPE)
12170 || !validate_arg (src, POINTER_TYPE)
12171 || !validate_arg (size, INTEGER_TYPE))
12174 p = c_getstr (src);
12175 /* If the SRC parameter is "", return DEST. */
12176 if (p && *p == '\0')
12177 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12179 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12182 /* If __builtin_strcat_chk is used, assume strcat is available. */
12183 fn = built_in_decls[BUILT_IN_STRCAT];
12187 return build_call_expr_loc (loc, fn, 2, dest, src);
12190 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12194 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12195 tree dest, tree src, tree len, tree size)
12200 if (!validate_arg (dest, POINTER_TYPE)
12201 || !validate_arg (src, POINTER_TYPE)
12202 || !validate_arg (size, INTEGER_TYPE)
12203 || !validate_arg (size, INTEGER_TYPE))
12206 p = c_getstr (src);
12207 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12208 if (p && *p == '\0')
12209 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12210 else if (integer_zerop (len))
12211 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12213 if (! host_integerp (size, 1))
12216 if (! integer_all_onesp (size))
12218 tree src_len = c_strlen (src, 1);
12220 && host_integerp (src_len, 1)
12221 && host_integerp (len, 1)
12222 && ! tree_int_cst_lt (len, src_len))
12224 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12225 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12229 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12234 /* If __builtin_strncat_chk is used, assume strncat is available. */
12235 fn = built_in_decls[BUILT_IN_STRNCAT];
12239 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12242 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12243 a normal call should be emitted rather than expanding the function
12244 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12247 fold_builtin_sprintf_chk (location_t loc, tree exp,
12248 enum built_in_function fcode)
12250 tree dest, size, len, fn, fmt, flag;
12251 const char *fmt_str;
12252 int nargs = call_expr_nargs (exp);
12254 /* Verify the required arguments in the original call. */
12257 dest = CALL_EXPR_ARG (exp, 0);
12258 if (!validate_arg (dest, POINTER_TYPE))
12260 flag = CALL_EXPR_ARG (exp, 1);
12261 if (!validate_arg (flag, INTEGER_TYPE))
12263 size = CALL_EXPR_ARG (exp, 2);
12264 if (!validate_arg (size, INTEGER_TYPE))
12266 fmt = CALL_EXPR_ARG (exp, 3);
12267 if (!validate_arg (fmt, POINTER_TYPE))
12270 if (! host_integerp (size, 1))
12275 if (!init_target_chars ())
12278 /* Check whether the format is a literal string constant. */
12279 fmt_str = c_getstr (fmt);
12280 if (fmt_str != NULL)
12282 /* If the format doesn't contain % args or %%, we know the size. */
12283 if (strchr (fmt_str, target_percent) == 0)
12285 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12286 len = build_int_cstu (size_type_node, strlen (fmt_str));
12288 /* If the format is "%s" and first ... argument is a string literal,
12289 we know the size too. */
12290 else if (fcode == BUILT_IN_SPRINTF_CHK
12291 && strcmp (fmt_str, target_percent_s) == 0)
12297 arg = CALL_EXPR_ARG (exp, 4);
12298 if (validate_arg (arg, POINTER_TYPE))
12300 len = c_strlen (arg, 1);
12301 if (! len || ! host_integerp (len, 1))
12308 if (! integer_all_onesp (size))
12310 if (! len || ! tree_int_cst_lt (len, size))
12314 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12315 or if format doesn't contain % chars or is "%s". */
12316 if (! integer_zerop (flag))
12318 if (fmt_str == NULL)
12320 if (strchr (fmt_str, target_percent) != NULL
12321 && strcmp (fmt_str, target_percent_s))
12325 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12326 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12327 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12331 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12334 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12335 a normal call should be emitted rather than expanding the function
12336 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12337 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12338 passed as second argument. */
12341 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12342 enum built_in_function fcode)
12344 tree dest, size, len, fn, fmt, flag;
12345 const char *fmt_str;
12347 /* Verify the required arguments in the original call. */
12348 if (call_expr_nargs (exp) < 5)
12350 dest = CALL_EXPR_ARG (exp, 0);
12351 if (!validate_arg (dest, POINTER_TYPE))
12353 len = CALL_EXPR_ARG (exp, 1);
12354 if (!validate_arg (len, INTEGER_TYPE))
12356 flag = CALL_EXPR_ARG (exp, 2);
12357 if (!validate_arg (flag, INTEGER_TYPE))
12359 size = CALL_EXPR_ARG (exp, 3);
12360 if (!validate_arg (size, INTEGER_TYPE))
12362 fmt = CALL_EXPR_ARG (exp, 4);
12363 if (!validate_arg (fmt, POINTER_TYPE))
12366 if (! host_integerp (size, 1))
12369 if (! integer_all_onesp (size))
12371 if (! host_integerp (len, 1))
12373 /* If LEN is not constant, try MAXLEN too.
12374 For MAXLEN only allow optimizing into non-_ocs function
12375 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12376 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12382 if (tree_int_cst_lt (size, maxlen))
12386 if (!init_target_chars ())
12389 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12390 or if format doesn't contain % chars or is "%s". */
12391 if (! integer_zerop (flag))
12393 fmt_str = c_getstr (fmt);
12394 if (fmt_str == NULL)
12396 if (strchr (fmt_str, target_percent) != NULL
12397 && strcmp (fmt_str, target_percent_s))
12401 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12403 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12404 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12408 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12411 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12412 FMT and ARG are the arguments to the call; we don't fold cases with
12413 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12415 Return NULL_TREE if no simplification was possible, otherwise return the
12416 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12417 code of the function to be simplified. */
12420 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12421 tree arg, bool ignore,
12422 enum built_in_function fcode)
12424 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12425 const char *fmt_str = NULL;
12427 /* If the return value is used, don't do the transformation. */
12431 /* Verify the required arguments in the original call. */
12432 if (!validate_arg (fmt, POINTER_TYPE))
12435 /* Check whether the format is a literal string constant. */
12436 fmt_str = c_getstr (fmt);
12437 if (fmt_str == NULL)
12440 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12442 /* If we're using an unlocked function, assume the other
12443 unlocked functions exist explicitly. */
12444 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12445 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12449 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12450 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12453 if (!init_target_chars ())
12456 if (strcmp (fmt_str, target_percent_s) == 0
12457 || strchr (fmt_str, target_percent) == NULL)
12461 if (strcmp (fmt_str, target_percent_s) == 0)
12463 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12466 if (!arg || !validate_arg (arg, POINTER_TYPE))
12469 str = c_getstr (arg);
12475 /* The format specifier doesn't contain any '%' characters. */
12476 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12482 /* If the string was "", printf does nothing. */
12483 if (str[0] == '\0')
12484 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12486 /* If the string has length of 1, call putchar. */
12487 if (str[1] == '\0')
12489 /* Given printf("c"), (where c is any one character,)
12490 convert "c"[0] to an int and pass that to the replacement
12492 newarg = build_int_cst (NULL_TREE, str[0]);
12494 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12498 /* If the string was "string\n", call puts("string"). */
12499 size_t len = strlen (str);
12500 if ((unsigned char)str[len - 1] == target_newline)
12502 /* Create a NUL-terminated string that's one char shorter
12503 than the original, stripping off the trailing '\n'. */
12504 char *newstr = XALLOCAVEC (char, len);
12505 memcpy (newstr, str, len - 1);
12506 newstr[len - 1] = 0;
12508 newarg = build_string_literal (len, newstr);
12510 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12513 /* We'd like to arrange to call fputs(string,stdout) here,
12514 but we need stdout and don't have a way to get it yet. */
12519 /* The other optimizations can be done only on the non-va_list variants. */
12520 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12523 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12524 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12526 if (!arg || !validate_arg (arg, POINTER_TYPE))
12529 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12532 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12533 else if (strcmp (fmt_str, target_percent_c) == 0)
12535 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12538 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12544 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12547 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12548 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12549 more than 3 arguments, and ARG may be null in the 2-argument case.
12551 Return NULL_TREE if no simplification was possible, otherwise return the
12552 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12553 code of the function to be simplified. */
12556 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12557 tree fmt, tree arg, bool ignore,
12558 enum built_in_function fcode)
12560 tree fn_fputc, fn_fputs, call = NULL_TREE;
12561 const char *fmt_str = NULL;
12563 /* If the return value is used, don't do the transformation. */
12567 /* Verify the required arguments in the original call. */
12568 if (!validate_arg (fp, POINTER_TYPE))
12570 if (!validate_arg (fmt, POINTER_TYPE))
12573 /* Check whether the format is a literal string constant. */
12574 fmt_str = c_getstr (fmt);
12575 if (fmt_str == NULL)
12578 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12580 /* If we're using an unlocked function, assume the other
12581 unlocked functions exist explicitly. */
12582 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12583 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12587 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12588 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12591 if (!init_target_chars ())
12594 /* If the format doesn't contain % args or %%, use strcpy. */
12595 if (strchr (fmt_str, target_percent) == NULL)
12597 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12601 /* If the format specifier was "", fprintf does nothing. */
12602 if (fmt_str[0] == '\0')
12604 /* If FP has side-effects, just wait until gimplification is
12606 if (TREE_SIDE_EFFECTS (fp))
12609 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12612 /* When "string" doesn't contain %, replace all cases of
12613 fprintf (fp, string) with fputs (string, fp). The fputs
12614 builtin will take care of special cases like length == 1. */
12616 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12619 /* The other optimizations can be done only on the non-va_list variants. */
12620 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12623 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12624 else if (strcmp (fmt_str, target_percent_s) == 0)
12626 if (!arg || !validate_arg (arg, POINTER_TYPE))
12629 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12632 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12633 else if (strcmp (fmt_str, target_percent_c) == 0)
12635 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12638 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12643 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12646 /* Initialize format string characters in the target charset. */
12649 init_target_chars (void)
12654 target_newline = lang_hooks.to_target_charset ('\n');
12655 target_percent = lang_hooks.to_target_charset ('%');
12656 target_c = lang_hooks.to_target_charset ('c');
12657 target_s = lang_hooks.to_target_charset ('s');
12658 if (target_newline == 0 || target_percent == 0 || target_c == 0
12662 target_percent_c[0] = target_percent;
12663 target_percent_c[1] = target_c;
12664 target_percent_c[2] = '\0';
12666 target_percent_s[0] = target_percent;
12667 target_percent_s[1] = target_s;
12668 target_percent_s[2] = '\0';
12670 target_percent_s_newline[0] = target_percent;
12671 target_percent_s_newline[1] = target_s;
12672 target_percent_s_newline[2] = target_newline;
12673 target_percent_s_newline[3] = '\0';
12680 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12681 and no overflow/underflow occurred. INEXACT is true if M was not
12682 exactly calculated. TYPE is the tree type for the result. This
12683 function assumes that you cleared the MPFR flags and then
12684 calculated M to see if anything subsequently set a flag prior to
12685 entering this function. Return NULL_TREE if any checks fail. */
12688 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12690 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12691 overflow/underflow occurred. If -frounding-math, proceed iff the
12692 result of calling FUNC was exact. */
12693 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12694 && (!flag_rounding_math || !inexact))
12696 REAL_VALUE_TYPE rr;
12698 real_from_mpfr (&rr, m, type, GMP_RNDN);
12699 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12700 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12701 but the mpft_t is not, then we underflowed in the
12703 if (real_isfinite (&rr)
12704 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12706 REAL_VALUE_TYPE rmode;
12708 real_convert (&rmode, TYPE_MODE (type), &rr);
12709 /* Proceed iff the specified mode can hold the value. */
12710 if (real_identical (&rmode, &rr))
12711 return build_real (type, rmode);
12717 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12718 number and no overflow/underflow occurred. INEXACT is true if M
12719 was not exactly calculated. TYPE is the tree type for the result.
12720 This function assumes that you cleared the MPFR flags and then
12721 calculated M to see if anything subsequently set a flag prior to
12722 entering this function. Return NULL_TREE if any checks fail, if
12723 FORCE_CONVERT is true, then bypass the checks. */
12726 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12728 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12729 overflow/underflow occurred. If -frounding-math, proceed iff the
12730 result of calling FUNC was exact. */
12732 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12733 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12734 && (!flag_rounding_math || !inexact)))
12736 REAL_VALUE_TYPE re, im;
12738 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12739 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12740 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12741 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12742 but the mpft_t is not, then we underflowed in the
12745 || (real_isfinite (&re) && real_isfinite (&im)
12746 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12747 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12749 REAL_VALUE_TYPE re_mode, im_mode;
12751 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12752 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12753 /* Proceed iff the specified mode can hold the value. */
12755 || (real_identical (&re_mode, &re)
12756 && real_identical (&im_mode, &im)))
12757 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12758 build_real (TREE_TYPE (type), im_mode));
12764 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12765 FUNC on it and return the resulting value as a tree with type TYPE.
12766 If MIN and/or MAX are not NULL, then the supplied ARG must be
12767 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12768 acceptable values, otherwise they are not. The mpfr precision is
12769 set to the precision of TYPE. We assume that function FUNC returns
12770 zero if the result could be calculated exactly within the requested
12774 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12775 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12778 tree result = NULL_TREE;
12782 /* To proceed, MPFR must exactly represent the target floating point
12783 format, which only happens when the target base equals two. */
12784 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12785 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12787 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12789 if (real_isfinite (ra)
12790 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12791 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12793 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12794 const int prec = fmt->p;
12795 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12799 mpfr_init2 (m, prec);
12800 mpfr_from_real (m, ra, GMP_RNDN);
12801 mpfr_clear_flags ();
12802 inexact = func (m, m, rnd);
12803 result = do_mpfr_ckconv (m, type, inexact);
12811 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12812 FUNC on it and return the resulting value as a tree with type TYPE.
12813 The mpfr precision is set to the precision of TYPE. We assume that
12814 function FUNC returns zero if the result could be calculated
12815 exactly within the requested precision. */
12818 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12819 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12821 tree result = NULL_TREE;
12826 /* To proceed, MPFR must exactly represent the target floating point
12827 format, which only happens when the target base equals two. */
12828 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12829 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12830 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12832 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12833 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12835 if (real_isfinite (ra1) && real_isfinite (ra2))
12837 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12838 const int prec = fmt->p;
12839 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12843 mpfr_inits2 (prec, m1, m2, NULL);
12844 mpfr_from_real (m1, ra1, GMP_RNDN);
12845 mpfr_from_real (m2, ra2, GMP_RNDN);
12846 mpfr_clear_flags ();
12847 inexact = func (m1, m1, m2, rnd);
12848 result = do_mpfr_ckconv (m1, type, inexact);
12849 mpfr_clears (m1, m2, NULL);
12856 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12857 FUNC on it and return the resulting value as a tree with type TYPE.
12858 The mpfr precision is set to the precision of TYPE. We assume that
12859 function FUNC returns zero if the result could be calculated
12860 exactly within the requested precision. */
12863 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12864 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12866 tree result = NULL_TREE;
12872 /* To proceed, MPFR must exactly represent the target floating point
12873 format, which only happens when the target base equals two. */
12874 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12875 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12876 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12877 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12879 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12880 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12881 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12883 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12885 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12886 const int prec = fmt->p;
12887 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12891 mpfr_inits2 (prec, m1, m2, m3, NULL);
12892 mpfr_from_real (m1, ra1, GMP_RNDN);
12893 mpfr_from_real (m2, ra2, GMP_RNDN);
12894 mpfr_from_real (m3, ra3, GMP_RNDN);
12895 mpfr_clear_flags ();
12896 inexact = func (m1, m1, m2, m3, rnd);
12897 result = do_mpfr_ckconv (m1, type, inexact);
12898 mpfr_clears (m1, m2, m3, NULL);
12905 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12906 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12907 If ARG_SINP and ARG_COSP are NULL then the result is returned
12908 as a complex value.
12909 The type is taken from the type of ARG and is used for setting the
12910 precision of the calculation and results. */
12913 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12915 tree const type = TREE_TYPE (arg);
12916 tree result = NULL_TREE;
12920 /* To proceed, MPFR must exactly represent the target floating point
12921 format, which only happens when the target base equals two. */
12922 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12923 && TREE_CODE (arg) == REAL_CST
12924 && !TREE_OVERFLOW (arg))
12926 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12928 if (real_isfinite (ra))
12930 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12931 const int prec = fmt->p;
12932 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12933 tree result_s, result_c;
12937 mpfr_inits2 (prec, m, ms, mc, NULL);
12938 mpfr_from_real (m, ra, GMP_RNDN);
12939 mpfr_clear_flags ();
12940 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12941 result_s = do_mpfr_ckconv (ms, type, inexact);
12942 result_c = do_mpfr_ckconv (mc, type, inexact);
12943 mpfr_clears (m, ms, mc, NULL);
12944 if (result_s && result_c)
12946 /* If we are to return in a complex value do so. */
12947 if (!arg_sinp && !arg_cosp)
12948 return build_complex (build_complex_type (type),
12949 result_c, result_s);
12951 /* Dereference the sin/cos pointer arguments. */
12952 arg_sinp = build_fold_indirect_ref (arg_sinp);
12953 arg_cosp = build_fold_indirect_ref (arg_cosp);
12954 /* Proceed if valid pointer type were passed in. */
12955 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12956 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12958 /* Set the values. */
12959 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12961 TREE_SIDE_EFFECTS (result_s) = 1;
12962 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12964 TREE_SIDE_EFFECTS (result_c) = 1;
12965 /* Combine the assignments into a compound expr. */
12966 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12967 result_s, result_c));
12975 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12976 two-argument mpfr order N Bessel function FUNC on them and return
12977 the resulting value as a tree with type TYPE. The mpfr precision
12978 is set to the precision of TYPE. We assume that function FUNC
12979 returns zero if the result could be calculated exactly within the
12980 requested precision. */
12982 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12983 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12984 const REAL_VALUE_TYPE *min, bool inclusive)
12986 tree result = NULL_TREE;
12991 /* To proceed, MPFR must exactly represent the target floating point
12992 format, which only happens when the target base equals two. */
12993 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12994 && host_integerp (arg1, 0)
12995 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12997 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12998 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13001 && real_isfinite (ra)
13002 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13004 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13005 const int prec = fmt->p;
13006 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13010 mpfr_init2 (m, prec);
13011 mpfr_from_real (m, ra, GMP_RNDN);
13012 mpfr_clear_flags ();
13013 inexact = func (m, n, m, rnd);
13014 result = do_mpfr_ckconv (m, type, inexact);
13022 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13023 the pointer *(ARG_QUO) and return the result. The type is taken
13024 from the type of ARG0 and is used for setting the precision of the
13025 calculation and results. */
13028 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13030 tree const type = TREE_TYPE (arg0);
13031 tree result = NULL_TREE;
13036 /* To proceed, MPFR must exactly represent the target floating point
13037 format, which only happens when the target base equals two. */
13038 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13039 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13040 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13042 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13043 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13045 if (real_isfinite (ra0) && real_isfinite (ra1))
13047 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13048 const int prec = fmt->p;
13049 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13054 mpfr_inits2 (prec, m0, m1, NULL);
13055 mpfr_from_real (m0, ra0, GMP_RNDN);
13056 mpfr_from_real (m1, ra1, GMP_RNDN);
13057 mpfr_clear_flags ();
13058 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13059 /* Remquo is independent of the rounding mode, so pass
13060 inexact=0 to do_mpfr_ckconv(). */
13061 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13062 mpfr_clears (m0, m1, NULL);
13065 /* MPFR calculates quo in the host's long so it may
13066 return more bits in quo than the target int can hold
13067 if sizeof(host long) > sizeof(target int). This can
13068 happen even for native compilers in LP64 mode. In
13069 these cases, modulo the quo value with the largest
13070 number that the target int can hold while leaving one
13071 bit for the sign. */
13072 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13073 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13075 /* Dereference the quo pointer argument. */
13076 arg_quo = build_fold_indirect_ref (arg_quo);
13077 /* Proceed iff a valid pointer type was passed in. */
13078 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13080 /* Set the value. */
13081 tree result_quo = fold_build2 (MODIFY_EXPR,
13082 TREE_TYPE (arg_quo), arg_quo,
13083 build_int_cst (NULL, integer_quo));
13084 TREE_SIDE_EFFECTS (result_quo) = 1;
13085 /* Combine the quo assignment with the rem. */
13086 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13087 result_quo, result_rem));
13095 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13096 resulting value as a tree with type TYPE. The mpfr precision is
13097 set to the precision of TYPE. We assume that this mpfr function
13098 returns zero if the result could be calculated exactly within the
13099 requested precision. In addition, the integer pointer represented
13100 by ARG_SG will be dereferenced and set to the appropriate signgam
13104 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13106 tree result = NULL_TREE;
13110 /* To proceed, MPFR must exactly represent the target floating point
13111 format, which only happens when the target base equals two. Also
13112 verify ARG is a constant and that ARG_SG is an int pointer. */
13113 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13114 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13115 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13116 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13118 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13120 /* In addition to NaN and Inf, the argument cannot be zero or a
13121 negative integer. */
13122 if (real_isfinite (ra)
13123 && ra->cl != rvc_zero
13124 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13126 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13127 const int prec = fmt->p;
13128 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13133 mpfr_init2 (m, prec);
13134 mpfr_from_real (m, ra, GMP_RNDN);
13135 mpfr_clear_flags ();
13136 inexact = mpfr_lgamma (m, &sg, m, rnd);
13137 result_lg = do_mpfr_ckconv (m, type, inexact);
13143 /* Dereference the arg_sg pointer argument. */
13144 arg_sg = build_fold_indirect_ref (arg_sg);
13145 /* Assign the signgam value into *arg_sg. */
13146 result_sg = fold_build2 (MODIFY_EXPR,
13147 TREE_TYPE (arg_sg), arg_sg,
13148 build_int_cst (NULL, sg));
13149 TREE_SIDE_EFFECTS (result_sg) = 1;
13150 /* Combine the signgam assignment with the lgamma result. */
13151 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13152 result_sg, result_lg));
13160 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13161 function FUNC on it and return the resulting value as a tree with
13162 type TYPE. The mpfr precision is set to the precision of TYPE. We
13163 assume that function FUNC returns zero if the result could be
13164 calculated exactly within the requested precision. */
13167 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13169 tree result = NULL_TREE;
13173 /* To proceed, MPFR must exactly represent the target floating point
13174 format, which only happens when the target base equals two. */
13175 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13176 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13177 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13179 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13180 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13182 if (real_isfinite (re) && real_isfinite (im))
13184 const struct real_format *const fmt =
13185 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13186 const int prec = fmt->p;
13187 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13188 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13192 mpc_init2 (m, prec);
13193 mpfr_from_real (mpc_realref(m), re, rnd);
13194 mpfr_from_real (mpc_imagref(m), im, rnd);
13195 mpfr_clear_flags ();
13196 inexact = func (m, m, crnd);
13197 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13205 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13206 mpc function FUNC on it and return the resulting value as a tree
13207 with type TYPE. The mpfr precision is set to the precision of
13208 TYPE. We assume that function FUNC returns zero if the result
13209 could be calculated exactly within the requested precision. If
13210 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13211 in the arguments and/or results. */
13214 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13215 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13217 tree result = NULL_TREE;
13222 /* To proceed, MPFR must exactly represent the target floating point
13223 format, which only happens when the target base equals two. */
13224 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13225 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13226 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13227 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13228 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13230 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13231 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13232 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13233 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13236 || (real_isfinite (re0) && real_isfinite (im0)
13237 && real_isfinite (re1) && real_isfinite (im1)))
13239 const struct real_format *const fmt =
13240 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13241 const int prec = fmt->p;
13242 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13243 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13247 mpc_init2 (m0, prec);
13248 mpc_init2 (m1, prec);
13249 mpfr_from_real (mpc_realref(m0), re0, rnd);
13250 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13251 mpfr_from_real (mpc_realref(m1), re1, rnd);
13252 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13253 mpfr_clear_flags ();
13254 inexact = func (m0, m0, m1, crnd);
13255 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13265 The functions below provide an alternate interface for folding
13266 builtin function calls presented as GIMPLE_CALL statements rather
13267 than as CALL_EXPRs. The folded result is still expressed as a
13268 tree. There is too much code duplication in the handling of
13269 varargs functions, and a more intrusive re-factoring would permit
13270 better sharing of code between the tree and statement-based
13271 versions of these functions. */
13273 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13274 along with N new arguments specified as the "..." parameters. SKIP
13275 is the number of arguments in STMT to be omitted. This function is used
13276 to do varargs-to-varargs transformations. */
13279 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13281 int oldnargs = gimple_call_num_args (stmt);
13282 int nargs = oldnargs - skip + n;
13283 tree fntype = TREE_TYPE (fndecl);
13284 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13288 location_t loc = gimple_location (stmt);
13290 buffer = XALLOCAVEC (tree, nargs);
13292 for (i = 0; i < n; i++)
13293 buffer[i] = va_arg (ap, tree);
13295 for (j = skip; j < oldnargs; j++, i++)
13296 buffer[i] = gimple_call_arg (stmt, j);
13298 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13301 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13302 a normal call should be emitted rather than expanding the function
13303 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13306 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13308 tree dest, size, len, fn, fmt, flag;
13309 const char *fmt_str;
13310 int nargs = gimple_call_num_args (stmt);
13312 /* Verify the required arguments in the original call. */
13315 dest = gimple_call_arg (stmt, 0);
13316 if (!validate_arg (dest, POINTER_TYPE))
13318 flag = gimple_call_arg (stmt, 1);
13319 if (!validate_arg (flag, INTEGER_TYPE))
13321 size = gimple_call_arg (stmt, 2);
13322 if (!validate_arg (size, INTEGER_TYPE))
13324 fmt = gimple_call_arg (stmt, 3);
13325 if (!validate_arg (fmt, POINTER_TYPE))
13328 if (! host_integerp (size, 1))
13333 if (!init_target_chars ())
13336 /* Check whether the format is a literal string constant. */
13337 fmt_str = c_getstr (fmt);
13338 if (fmt_str != NULL)
13340 /* If the format doesn't contain % args or %%, we know the size. */
13341 if (strchr (fmt_str, target_percent) == 0)
13343 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13344 len = build_int_cstu (size_type_node, strlen (fmt_str));
13346 /* If the format is "%s" and first ... argument is a string literal,
13347 we know the size too. */
13348 else if (fcode == BUILT_IN_SPRINTF_CHK
13349 && strcmp (fmt_str, target_percent_s) == 0)
13355 arg = gimple_call_arg (stmt, 4);
13356 if (validate_arg (arg, POINTER_TYPE))
13358 len = c_strlen (arg, 1);
13359 if (! len || ! host_integerp (len, 1))
13366 if (! integer_all_onesp (size))
13368 if (! len || ! tree_int_cst_lt (len, size))
13372 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13373 or if format doesn't contain % chars or is "%s". */
13374 if (! integer_zerop (flag))
13376 if (fmt_str == NULL)
13378 if (strchr (fmt_str, target_percent) != NULL
13379 && strcmp (fmt_str, target_percent_s))
13383 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13384 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13385 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13389 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13392 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13393 a normal call should be emitted rather than expanding the function
13394 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13395 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13396 passed as second argument. */
13399 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13400 enum built_in_function fcode)
13402 tree dest, size, len, fn, fmt, flag;
13403 const char *fmt_str;
13405 /* Verify the required arguments in the original call. */
13406 if (gimple_call_num_args (stmt) < 5)
13408 dest = gimple_call_arg (stmt, 0);
13409 if (!validate_arg (dest, POINTER_TYPE))
13411 len = gimple_call_arg (stmt, 1);
13412 if (!validate_arg (len, INTEGER_TYPE))
13414 flag = gimple_call_arg (stmt, 2);
13415 if (!validate_arg (flag, INTEGER_TYPE))
13417 size = gimple_call_arg (stmt, 3);
13418 if (!validate_arg (size, INTEGER_TYPE))
13420 fmt = gimple_call_arg (stmt, 4);
13421 if (!validate_arg (fmt, POINTER_TYPE))
13424 if (! host_integerp (size, 1))
13427 if (! integer_all_onesp (size))
13429 if (! host_integerp (len, 1))
13431 /* If LEN is not constant, try MAXLEN too.
13432 For MAXLEN only allow optimizing into non-_ocs function
13433 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13434 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13440 if (tree_int_cst_lt (size, maxlen))
13444 if (!init_target_chars ())
13447 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13448 or if format doesn't contain % chars or is "%s". */
13449 if (! integer_zerop (flag))
13451 fmt_str = c_getstr (fmt);
13452 if (fmt_str == NULL)
13454 if (strchr (fmt_str, target_percent) != NULL
13455 && strcmp (fmt_str, target_percent_s))
13459 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13461 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13462 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13466 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13469 /* Builtins with folding operations that operate on "..." arguments
13470 need special handling; we need to store the arguments in a convenient
13471 data structure before attempting any folding. Fortunately there are
13472 only a few builtins that fall into this category. FNDECL is the
13473 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13474 result of the function call is ignored. */
13477 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13478 bool ignore ATTRIBUTE_UNUSED)
13480 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13481 tree ret = NULL_TREE;
13485 case BUILT_IN_SPRINTF_CHK:
13486 case BUILT_IN_VSPRINTF_CHK:
13487 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13490 case BUILT_IN_SNPRINTF_CHK:
13491 case BUILT_IN_VSNPRINTF_CHK:
13492 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13499 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13500 TREE_NO_WARNING (ret) = 1;
13506 /* A wrapper function for builtin folding that prevents warnings for
13507 "statement without effect" and the like, caused by removing the
13508 call node earlier than the warning is generated. */
13511 fold_call_stmt (gimple stmt, bool ignore)
13513 tree ret = NULL_TREE;
13514 tree fndecl = gimple_call_fndecl (stmt);
13515 location_t loc = gimple_location (stmt);
13517 && TREE_CODE (fndecl) == FUNCTION_DECL
13518 && DECL_BUILT_IN (fndecl)
13519 && !gimple_call_va_arg_pack_p (stmt))
13521 int nargs = gimple_call_num_args (stmt);
13523 if (avoid_folding_inline_builtin (fndecl))
13525 /* FIXME: Don't use a list in this interface. */
13526 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13528 tree arglist = NULL_TREE;
13530 for (i = nargs - 1; i >= 0; i--)
13531 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13532 return targetm.fold_builtin (fndecl, arglist, ignore);
13536 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13538 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13540 for (i = 0; i < nargs; i++)
13541 args[i] = gimple_call_arg (stmt, i);
13542 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13545 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13548 /* Propagate location information from original call to
13549 expansion of builtin. Otherwise things like
13550 maybe_emit_chk_warning, that operate on the expansion
13551 of a builtin, will use the wrong location information. */
13552 if (gimple_has_location (stmt))
13554 tree realret = ret;
13555 if (TREE_CODE (ret) == NOP_EXPR)
13556 realret = TREE_OPERAND (ret, 0);
13557 if (CAN_HAVE_LOCATION_P (realret)
13558 && !EXPR_HAS_LOCATION (realret))
13559 SET_EXPR_LOCATION (realret, loc);
13569 /* Look up the function in built_in_decls that corresponds to DECL
13570 and set ASMSPEC as its user assembler name. DECL must be a
13571 function decl that declares a builtin. */
13574 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13577 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13578 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13581 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13582 set_user_assembler_name (builtin, asmspec);
13583 switch (DECL_FUNCTION_CODE (decl))
13585 case BUILT_IN_MEMCPY:
13586 init_block_move_fn (asmspec);
13587 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13589 case BUILT_IN_MEMSET:
13590 init_block_clear_fn (asmspec);
13591 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13593 case BUILT_IN_MEMMOVE:
13594 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13596 case BUILT_IN_MEMCMP:
13597 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13599 case BUILT_IN_ABORT:
13600 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13603 if (INT_TYPE_SIZE < BITS_PER_WORD)
13605 set_user_assembler_libfunc ("ffs", asmspec);
13606 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13607 MODE_INT, 0), "ffs");