1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
68 const char * built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_args_info (tree);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
228 /* Return true if NAME starts with __builtin_ or __sync_. */
231 is_builtin_name (const char *name)
233 if (strncmp (name, "__builtin_", 10) == 0)
235 if (strncmp (name, "__sync_", 7) == 0)
241 /* Return true if DECL is a function symbol representing a built-in. */
244 is_builtin_fn (tree decl)
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
255 called_as_built_in (tree node)
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
260 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
261 return is_builtin_name (name);
264 /* Return the alignment in bits of EXP, an object.
265 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
266 guessed alignment e.g. from type alignment. */
269 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
274 if (handled_component_p (exp))
276 HOST_WIDE_INT bitsize, bitpos;
278 enum machine_mode mode;
279 int unsignedp, volatilep;
281 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
282 &mode, &unsignedp, &volatilep, true);
284 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
289 if (TREE_CODE (offset) == PLUS_EXPR)
291 next_offset = TREE_OPERAND (offset, 0);
292 offset = TREE_OPERAND (offset, 1);
296 if (host_integerp (offset, 1))
298 /* Any overflow in calculating offset_bits won't change
301 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
304 inner = MIN (inner, (offset_bits & -offset_bits));
306 else if (TREE_CODE (offset) == MULT_EXPR
307 && host_integerp (TREE_OPERAND (offset, 1), 1))
309 /* Any overflow in calculating offset_factor won't change
311 unsigned offset_factor
312 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
316 inner = MIN (inner, (offset_factor & -offset_factor));
320 inner = MIN (inner, BITS_PER_UNIT);
323 offset = next_offset;
326 if (TREE_CODE (exp) == CONST_DECL)
327 exp = DECL_INITIAL (exp);
329 && TREE_CODE (exp) != LABEL_DECL)
330 align = MIN (inner, DECL_ALIGN (exp));
331 #ifdef CONSTANT_ALIGNMENT
332 else if (CONSTANT_CLASS_P (exp))
333 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
335 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
336 || TREE_CODE (exp) == INDIRECT_REF)
337 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
339 align = MIN (align, inner);
340 return MIN (align, max_align);
343 /* Returns true iff we can trust that alignment information has been
344 calculated properly. */
347 can_trust_pointer_alignment (void)
349 /* We rely on TER to compute accurate alignment information. */
350 return (optimize && flag_tree_ter);
353 /* Return the alignment in bits of EXP, a pointer valued expression.
354 But don't return more than MAX_ALIGN no matter what.
355 The alignment returned is, by default, the alignment of the thing that
356 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
358 Otherwise, look at the expression to see if we can do better, i.e., if the
359 expression is actually pointing at an object whose alignment is tighter. */
362 get_pointer_alignment (tree exp, unsigned int max_align)
364 unsigned int align, inner;
366 if (!can_trust_pointer_alignment ())
369 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
372 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
373 align = MIN (align, max_align);
377 switch (TREE_CODE (exp))
380 exp = TREE_OPERAND (exp, 0);
381 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
384 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
385 align = MIN (inner, max_align);
388 case POINTER_PLUS_EXPR:
389 /* If sum of pointer + int, restrict our maximum alignment to that
390 imposed by the integer. If not, we can't do any better than
392 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
395 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
396 & (max_align / BITS_PER_UNIT - 1))
400 exp = TREE_OPERAND (exp, 0);
404 /* See what we are pointing at and look at its alignment. */
405 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
413 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
414 way, because it could contain a zero byte in the middle.
415 TREE_STRING_LENGTH is the size of the character array, not the string.
417 ONLY_VALUE should be nonzero if the result is not going to be emitted
418 into the instruction stream and zero if it is going to be expanded.
419 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
420 is returned, otherwise NULL, since
421 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
422 evaluate the side-effects.
424 The value returned is of type `ssizetype'.
426 Unfortunately, string_constant can't access the values of const char
427 arrays with initializers, so neither can we do so here. */
430 c_strlen (tree src, int only_value)
433 HOST_WIDE_INT offset;
439 if (TREE_CODE (src) == COND_EXPR
440 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
444 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
445 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
446 if (tree_int_cst_equal (len1, len2))
450 if (TREE_CODE (src) == COMPOUND_EXPR
451 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
452 return c_strlen (TREE_OPERAND (src, 1), only_value);
454 if (EXPR_HAS_LOCATION (src))
455 loc = EXPR_LOCATION (src);
457 loc = input_location;
459 src = string_constant (src, &offset_node);
463 max = TREE_STRING_LENGTH (src) - 1;
464 ptr = TREE_STRING_POINTER (src);
466 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
468 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
469 compute the offset to the following null if we don't know where to
470 start searching for it. */
473 for (i = 0; i < max; i++)
477 /* We don't know the starting offset, but we do know that the string
478 has no internal zero bytes. We can assume that the offset falls
479 within the bounds of the string; otherwise, the programmer deserves
480 what he gets. Subtract the offset from the length of the string,
481 and return that. This would perhaps not be valid if we were dealing
482 with named arrays in addition to literal string constants. */
484 return size_diffop_loc (loc, size_int (max), offset_node);
487 /* We have a known offset into the string. Start searching there for
488 a null character if we can represent it as a single HOST_WIDE_INT. */
489 if (offset_node == 0)
491 else if (! host_integerp (offset_node, 0))
494 offset = tree_low_cst (offset_node, 0);
496 /* If the offset is known to be out of bounds, warn, and call strlen at
498 if (offset < 0 || offset > max)
500 /* Suppress multiple warnings for propagated constant strings. */
501 if (! TREE_NO_WARNING (src))
503 warning_at (loc, 0, "offset outside bounds of constant string");
504 TREE_NO_WARNING (src) = 1;
509 /* Use strlen to search for the first zero byte. Since any strings
510 constructed with build_string will have nulls appended, we win even
511 if we get handed something like (char[4])"abcd".
513 Since OFFSET is our starting index into the string, no further
514 calculation is needed. */
515 return ssize_int (strlen (ptr + offset));
518 /* Return a char pointer for a C string if it is a string constant
519 or sum of string constant and integer constant. */
526 src = string_constant (src, &offset_node);
530 if (offset_node == 0)
531 return TREE_STRING_POINTER (src);
532 else if (!host_integerp (offset_node, 1)
533 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
536 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
539 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
540 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
543 c_readstr (const char *str, enum machine_mode mode)
549 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
554 for (i = 0; i < GET_MODE_SIZE (mode); i++)
557 if (WORDS_BIG_ENDIAN)
558 j = GET_MODE_SIZE (mode) - i - 1;
559 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
560 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
561 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
563 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
566 ch = (unsigned char) str[i];
567 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
569 return immed_double_const (c[0], c[1], mode);
572 /* Cast a target constant CST to target CHAR and if that value fits into
573 host char type, return zero and put that value into variable pointed to by
577 target_char_cast (tree cst, char *p)
579 unsigned HOST_WIDE_INT val, hostval;
581 if (!host_integerp (cst, 1)
582 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
585 val = tree_low_cst (cst, 1);
586 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
587 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
590 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
591 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
600 /* Similar to save_expr, but assumes that arbitrary code is not executed
601 in between the multiple evaluations. In particular, we assume that a
602 non-addressable local variable will not be modified. */
605 builtin_save_expr (tree exp)
607 if (TREE_ADDRESSABLE (exp) == 0
608 && (TREE_CODE (exp) == PARM_DECL
609 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
612 return save_expr (exp);
615 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
616 times to get the address of either a higher stack frame, or a return
617 address located within it (depending on FNDECL_CODE). */
620 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
624 #ifdef INITIAL_FRAME_ADDRESS_RTX
625 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
629 /* For a zero count with __builtin_return_address, we don't care what
630 frame address we return, because target-specific definitions will
631 override us. Therefore frame pointer elimination is OK, and using
632 the soft frame pointer is OK.
634 For a nonzero count, or a zero count with __builtin_frame_address,
635 we require a stable offset from the current frame pointer to the
636 previous one, so we must use the hard frame pointer, and
637 we must disable frame pointer elimination. */
638 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
639 tem = frame_pointer_rtx;
642 tem = hard_frame_pointer_rtx;
644 /* Tell reload not to eliminate the frame pointer. */
645 crtl->accesses_prior_frames = 1;
649 /* Some machines need special handling before we can access
650 arbitrary frames. For example, on the SPARC, we must first flush
651 all register windows to the stack. */
652 #ifdef SETUP_FRAME_ADDRESSES
654 SETUP_FRAME_ADDRESSES ();
657 /* On the SPARC, the return address is not in the frame, it is in a
658 register. There is no way to access it off of the current frame
659 pointer, but it can be accessed off the previous frame pointer by
660 reading the value from the register window save area. */
661 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
662 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
666 /* Scan back COUNT frames to the specified frame. */
667 for (i = 0; i < count; i++)
669 /* Assume the dynamic chain pointer is in the word that the
670 frame address points to, unless otherwise specified. */
671 #ifdef DYNAMIC_CHAIN_ADDRESS
672 tem = DYNAMIC_CHAIN_ADDRESS (tem);
674 tem = memory_address (Pmode, tem);
675 tem = gen_frame_mem (Pmode, tem);
676 tem = copy_to_reg (tem);
679 /* For __builtin_frame_address, return what we've got. But, on
680 the SPARC for example, we may have to add a bias. */
681 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
682 #ifdef FRAME_ADDR_RTX
683 return FRAME_ADDR_RTX (tem);
688 /* For __builtin_return_address, get the return address from that frame. */
689 #ifdef RETURN_ADDR_RTX
690 tem = RETURN_ADDR_RTX (count, tem);
692 tem = memory_address (Pmode,
693 plus_constant (tem, GET_MODE_SIZE (Pmode)));
694 tem = gen_frame_mem (Pmode, tem);
699 /* Alias set used for setjmp buffer. */
700 static alias_set_type setjmp_alias_set = -1;
702 /* Construct the leading half of a __builtin_setjmp call. Control will
703 return to RECEIVER_LABEL. This is also called directly by the SJLJ
704 exception handling code. */
707 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
709 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
713 if (setjmp_alias_set == -1)
714 setjmp_alias_set = new_alias_set ();
716 buf_addr = convert_memory_address (Pmode, buf_addr);
718 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
720 /* We store the frame pointer and the address of receiver_label in
721 the buffer and use the rest of it for the stack save area, which
722 is machine-dependent. */
724 mem = gen_rtx_MEM (Pmode, buf_addr);
725 set_mem_alias_set (mem, setjmp_alias_set);
726 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
728 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
729 set_mem_alias_set (mem, setjmp_alias_set);
731 emit_move_insn (validize_mem (mem),
732 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
734 stack_save = gen_rtx_MEM (sa_mode,
735 plus_constant (buf_addr,
736 2 * GET_MODE_SIZE (Pmode)));
737 set_mem_alias_set (stack_save, setjmp_alias_set);
738 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
740 /* If there is further processing to do, do it. */
741 #ifdef HAVE_builtin_setjmp_setup
742 if (HAVE_builtin_setjmp_setup)
743 emit_insn (gen_builtin_setjmp_setup (buf_addr));
746 /* Tell optimize_save_area_alloca that extra work is going to
747 need to go on during alloca. */
748 cfun->calls_setjmp = 1;
750 /* We have a nonlocal label. */
751 cfun->has_nonlocal_label = 1;
754 /* Construct the trailing part of a __builtin_setjmp call. This is
755 also called directly by the SJLJ exception handling code. */
758 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
762 /* Clobber the FP when we get here, so we have to make sure it's
763 marked as used by this function. */
764 emit_use (hard_frame_pointer_rtx);
766 /* Mark the static chain as clobbered here so life information
767 doesn't get messed up for it. */
768 chain = targetm.calls.static_chain (current_function_decl, true);
769 if (chain && REG_P (chain))
770 emit_clobber (chain);
772 /* Now put in the code to restore the frame pointer, and argument
773 pointer, if needed. */
774 #ifdef HAVE_nonlocal_goto
775 if (! HAVE_nonlocal_goto)
778 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
779 /* This might change the hard frame pointer in ways that aren't
780 apparent to early optimization passes, so force a clobber. */
781 emit_clobber (hard_frame_pointer_rtx);
784 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
785 if (fixed_regs[ARG_POINTER_REGNUM])
787 #ifdef ELIMINABLE_REGS
789 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
791 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
792 if (elim_regs[i].from == ARG_POINTER_REGNUM
793 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
796 if (i == ARRAY_SIZE (elim_regs))
799 /* Now restore our arg pointer from the address at which it
800 was saved in our stack frame. */
801 emit_move_insn (crtl->args.internal_arg_pointer,
802 copy_to_reg (get_arg_pointer_save_area ()));
807 #ifdef HAVE_builtin_setjmp_receiver
808 if (HAVE_builtin_setjmp_receiver)
809 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
812 #ifdef HAVE_nonlocal_goto_receiver
813 if (HAVE_nonlocal_goto_receiver)
814 emit_insn (gen_nonlocal_goto_receiver ());
819 /* We must not allow the code we just generated to be reordered by
820 scheduling. Specifically, the update of the frame pointer must
821 happen immediately, not later. */
822 emit_insn (gen_blockage ());
825 /* __builtin_longjmp is passed a pointer to an array of five words (not
826 all will be used on all machines). It operates similarly to the C
827 library function of the same name, but is more efficient. Much of
828 the code below is copied from the handling of non-local gotos. */
831 expand_builtin_longjmp (rtx buf_addr, rtx value)
833 rtx fp, lab, stack, insn, last;
834 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
836 /* DRAP is needed for stack realign if longjmp is expanded to current
838 if (SUPPORTS_STACK_ALIGNMENT)
839 crtl->need_drap = true;
841 if (setjmp_alias_set == -1)
842 setjmp_alias_set = new_alias_set ();
844 buf_addr = convert_memory_address (Pmode, buf_addr);
846 buf_addr = force_reg (Pmode, buf_addr);
848 /* We require that the user must pass a second argument of 1, because
849 that is what builtin_setjmp will return. */
850 gcc_assert (value == const1_rtx);
852 last = get_last_insn ();
853 #ifdef HAVE_builtin_longjmp
854 if (HAVE_builtin_longjmp)
855 emit_insn (gen_builtin_longjmp (buf_addr));
859 fp = gen_rtx_MEM (Pmode, buf_addr);
860 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
861 GET_MODE_SIZE (Pmode)));
863 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
864 2 * GET_MODE_SIZE (Pmode)));
865 set_mem_alias_set (fp, setjmp_alias_set);
866 set_mem_alias_set (lab, setjmp_alias_set);
867 set_mem_alias_set (stack, setjmp_alias_set);
869 /* Pick up FP, label, and SP from the block and jump. This code is
870 from expand_goto in stmt.c; see there for detailed comments. */
871 #ifdef HAVE_nonlocal_goto
872 if (HAVE_nonlocal_goto)
873 /* We have to pass a value to the nonlocal_goto pattern that will
874 get copied into the static_chain pointer, but it does not matter
875 what that value is, because builtin_setjmp does not use it. */
876 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
880 lab = copy_to_reg (lab);
882 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
883 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
885 emit_move_insn (hard_frame_pointer_rtx, fp);
886 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
888 emit_use (hard_frame_pointer_rtx);
889 emit_use (stack_pointer_rtx);
890 emit_indirect_jump (lab);
894 /* Search backwards and mark the jump insn as a non-local goto.
895 Note that this precludes the use of __builtin_longjmp to a
896 __builtin_setjmp target in the same function. However, we've
897 already cautioned the user that these functions are for
898 internal exception handling use only. */
899 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
901 gcc_assert (insn != last);
905 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
908 else if (CALL_P (insn))
913 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
914 and the address of the save area. */
917 expand_builtin_nonlocal_goto (tree exp)
919 tree t_label, t_save_area;
920 rtx r_label, r_save_area, r_fp, r_sp, insn;
922 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
925 t_label = CALL_EXPR_ARG (exp, 0);
926 t_save_area = CALL_EXPR_ARG (exp, 1);
928 r_label = expand_normal (t_label);
929 r_label = convert_memory_address (Pmode, r_label);
930 r_save_area = expand_normal (t_save_area);
931 r_save_area = convert_memory_address (Pmode, r_save_area);
932 /* Copy the address of the save location to a register just in case it was based
933 on the frame pointer. */
934 r_save_area = copy_to_reg (r_save_area);
935 r_fp = gen_rtx_MEM (Pmode, r_save_area);
936 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
937 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
939 crtl->has_nonlocal_goto = 1;
941 #ifdef HAVE_nonlocal_goto
942 /* ??? We no longer need to pass the static chain value, afaik. */
943 if (HAVE_nonlocal_goto)
944 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
948 r_label = copy_to_reg (r_label);
950 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
951 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
953 /* Restore frame pointer for containing function.
954 This sets the actual hard register used for the frame pointer
955 to the location of the function's incoming static chain info.
956 The non-local goto handler will then adjust it to contain the
957 proper value and reload the argument pointer, if needed. */
958 emit_move_insn (hard_frame_pointer_rtx, r_fp);
959 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
961 /* USE of hard_frame_pointer_rtx added for consistency;
962 not clear if really needed. */
963 emit_use (hard_frame_pointer_rtx);
964 emit_use (stack_pointer_rtx);
966 /* If the architecture is using a GP register, we must
967 conservatively assume that the target function makes use of it.
968 The prologue of functions with nonlocal gotos must therefore
969 initialize the GP register to the appropriate value, and we
970 must then make sure that this value is live at the point
971 of the jump. (Note that this doesn't necessarily apply
972 to targets with a nonlocal_goto pattern; they are free
973 to implement it in their own way. Note also that this is
974 a no-op if the GP register is a global invariant.) */
975 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
976 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
977 emit_use (pic_offset_table_rtx);
979 emit_indirect_jump (r_label);
982 /* Search backwards to the jump insn and mark it as a
984 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
988 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
991 else if (CALL_P (insn))
998 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
999 (not all will be used on all machines) that was passed to __builtin_setjmp.
1000 It updates the stack pointer in that block to correspond to the current
1004 expand_builtin_update_setjmp_buf (rtx buf_addr)
1006 enum machine_mode sa_mode = Pmode;
1010 #ifdef HAVE_save_stack_nonlocal
1011 if (HAVE_save_stack_nonlocal)
1012 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1014 #ifdef STACK_SAVEAREA_MODE
1015 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1019 = gen_rtx_MEM (sa_mode,
1022 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1026 emit_insn (gen_setjmp ());
1029 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1032 /* Expand a call to __builtin_prefetch. For a target that does not support
1033 data prefetch, evaluate the memory address argument in case it has side
1037 expand_builtin_prefetch (tree exp)
1039 tree arg0, arg1, arg2;
1043 if (!validate_arglist (exp, POINTER_TYPE, 0))
1046 arg0 = CALL_EXPR_ARG (exp, 0);
1048 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1049 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1051 nargs = call_expr_nargs (exp);
1053 arg1 = CALL_EXPR_ARG (exp, 1);
1055 arg1 = integer_zero_node;
1057 arg2 = CALL_EXPR_ARG (exp, 2);
1059 arg2 = build_int_cst (NULL_TREE, 3);
1061 /* Argument 0 is an address. */
1062 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1064 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1065 if (TREE_CODE (arg1) != INTEGER_CST)
1067 error ("second argument to %<__builtin_prefetch%> must be a constant");
1068 arg1 = integer_zero_node;
1070 op1 = expand_normal (arg1);
1071 /* Argument 1 must be either zero or one. */
1072 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1074 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1079 /* Argument 2 (locality) must be a compile-time constant int. */
1080 if (TREE_CODE (arg2) != INTEGER_CST)
1082 error ("third argument to %<__builtin_prefetch%> must be a constant");
1083 arg2 = integer_zero_node;
1085 op2 = expand_normal (arg2);
1086 /* Argument 2 must be 0, 1, 2, or 3. */
1087 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1089 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1093 #ifdef HAVE_prefetch
1096 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1098 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1099 || (GET_MODE (op0) != Pmode))
1101 op0 = convert_memory_address (Pmode, op0);
1102 op0 = force_reg (Pmode, op0);
1104 emit_insn (gen_prefetch (op0, op1, op2));
1108 /* Don't do anything with direct references to volatile memory, but
1109 generate code to handle other side effects. */
1110 if (!MEM_P (op0) && side_effects_p (op0))
1114 /* Get a MEM rtx for expression EXP which is the address of an operand
1115 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1116 the maximum length of the block of memory that might be accessed or
1120 get_memory_rtx (tree exp, tree len)
1122 tree orig_exp = exp;
1126 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1127 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1128 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1129 exp = TREE_OPERAND (exp, 0);
1131 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1132 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1134 /* Get an expression we can use to find the attributes to assign to MEM.
1135 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1136 we can. First remove any nops. */
1137 while (CONVERT_EXPR_P (exp)
1138 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1139 exp = TREE_OPERAND (exp, 0);
1142 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1143 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1144 && host_integerp (TREE_OPERAND (exp, 1), 0)
1145 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1146 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1147 else if (TREE_CODE (exp) == ADDR_EXPR)
1148 exp = TREE_OPERAND (exp, 0);
1149 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1150 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1154 /* Honor attributes derived from exp, except for the alias set
1155 (as builtin stringops may alias with anything) and the size
1156 (as stringops may access multiple array elements). */
1159 set_mem_attributes (mem, exp, 0);
1162 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1164 /* Allow the string and memory builtins to overflow from one
1165 field into another, see http://gcc.gnu.org/PR23561.
1166 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1167 memory accessed by the string or memory builtin will fit
1168 within the field. */
1169 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1171 tree mem_expr = MEM_EXPR (mem);
1172 HOST_WIDE_INT offset = -1, length = -1;
1175 while (TREE_CODE (inner) == ARRAY_REF
1176 || CONVERT_EXPR_P (inner)
1177 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1178 || TREE_CODE (inner) == SAVE_EXPR)
1179 inner = TREE_OPERAND (inner, 0);
1181 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1183 if (MEM_OFFSET (mem)
1184 && CONST_INT_P (MEM_OFFSET (mem)))
1185 offset = INTVAL (MEM_OFFSET (mem));
1187 if (offset >= 0 && len && host_integerp (len, 0))
1188 length = tree_low_cst (len, 0);
1190 while (TREE_CODE (inner) == COMPONENT_REF)
1192 tree field = TREE_OPERAND (inner, 1);
1193 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1194 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1196 /* Bitfields are generally not byte-addressable. */
1197 gcc_assert (!DECL_BIT_FIELD (field)
1198 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1199 % BITS_PER_UNIT) == 0
1200 && host_integerp (DECL_SIZE (field), 0)
1201 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1202 % BITS_PER_UNIT) == 0));
1204 /* If we can prove that the memory starting at XEXP (mem, 0) and
1205 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1206 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1207 fields without DECL_SIZE_UNIT like flexible array members. */
1209 && DECL_SIZE_UNIT (field)
1210 && host_integerp (DECL_SIZE_UNIT (field), 0))
1213 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1216 && offset + length <= size)
1221 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1222 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1223 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1231 mem_expr = TREE_OPERAND (mem_expr, 0);
1232 inner = TREE_OPERAND (inner, 0);
1235 if (mem_expr == NULL)
1237 if (mem_expr != MEM_EXPR (mem))
1239 set_mem_expr (mem, mem_expr);
1240 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1243 set_mem_alias_set (mem, 0);
1244 set_mem_size (mem, NULL_RTX);
1250 /* Built-in functions to perform an untyped call and return. */
1252 /* For each register that may be used for calling a function, this
1253 gives a mode used to copy the register's value. VOIDmode indicates
1254 the register is not used for calling a function. If the machine
1255 has register windows, this gives only the outbound registers.
1256 INCOMING_REGNO gives the corresponding inbound register. */
1257 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1259 /* For each register that may be used for returning values, this gives
1260 a mode used to copy the register's value. VOIDmode indicates the
1261 register is not used for returning values. If the machine has
1262 register windows, this gives only the outbound registers.
1263 INCOMING_REGNO gives the corresponding inbound register. */
1264 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1266 /* Return the size required for the block returned by __builtin_apply_args,
1267 and initialize apply_args_mode. */
1270 apply_args_size (void)
1272 static int size = -1;
1275 enum machine_mode mode;
1277 /* The values computed by this function never change. */
1280 /* The first value is the incoming arg-pointer. */
1281 size = GET_MODE_SIZE (Pmode);
1283 /* The second value is the structure value address unless this is
1284 passed as an "invisible" first argument. */
1285 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1286 size += GET_MODE_SIZE (Pmode);
1288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1289 if (FUNCTION_ARG_REGNO_P (regno))
1291 mode = reg_raw_mode[regno];
1293 gcc_assert (mode != VOIDmode);
1295 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1296 if (size % align != 0)
1297 size = CEIL (size, align) * align;
1298 size += GET_MODE_SIZE (mode);
1299 apply_args_mode[regno] = mode;
1303 apply_args_mode[regno] = VOIDmode;
1309 /* Return the size required for the block returned by __builtin_apply,
1310 and initialize apply_result_mode. */
1313 apply_result_size (void)
1315 static int size = -1;
1317 enum machine_mode mode;
1319 /* The values computed by this function never change. */
1324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325 if (FUNCTION_VALUE_REGNO_P (regno))
1327 mode = reg_raw_mode[regno];
1329 gcc_assert (mode != VOIDmode);
1331 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1332 if (size % align != 0)
1333 size = CEIL (size, align) * align;
1334 size += GET_MODE_SIZE (mode);
1335 apply_result_mode[regno] = mode;
1338 apply_result_mode[regno] = VOIDmode;
1340 /* Allow targets that use untyped_call and untyped_return to override
1341 the size so that machine-specific information can be stored here. */
1342 #ifdef APPLY_RESULT_SIZE
1343 size = APPLY_RESULT_SIZE;
1349 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1350 /* Create a vector describing the result block RESULT. If SAVEP is true,
1351 the result block is used to save the values; otherwise it is used to
1352 restore the values. */
1355 result_vector (int savep, rtx result)
1357 int regno, size, align, nelts;
1358 enum machine_mode mode;
1360 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1363 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1364 if ((mode = apply_result_mode[regno]) != VOIDmode)
1366 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1367 if (size % align != 0)
1368 size = CEIL (size, align) * align;
1369 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1370 mem = adjust_address (result, mode, size);
1371 savevec[nelts++] = (savep
1372 ? gen_rtx_SET (VOIDmode, mem, reg)
1373 : gen_rtx_SET (VOIDmode, reg, mem));
1374 size += GET_MODE_SIZE (mode);
1376 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1378 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1380 /* Save the state required to perform an untyped call with the same
1381 arguments as were passed to the current function. */
1384 expand_builtin_apply_args_1 (void)
1387 int size, align, regno;
1388 enum machine_mode mode;
1389 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1391 /* Create a block where the arg-pointer, structure value address,
1392 and argument registers can be saved. */
1393 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1395 /* Walk past the arg-pointer and structure value address. */
1396 size = GET_MODE_SIZE (Pmode);
1397 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1398 size += GET_MODE_SIZE (Pmode);
1400 /* Save each register used in calling a function to the block. */
1401 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1402 if ((mode = apply_args_mode[regno]) != VOIDmode)
1404 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1405 if (size % align != 0)
1406 size = CEIL (size, align) * align;
1408 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1410 emit_move_insn (adjust_address (registers, mode, size), tem);
1411 size += GET_MODE_SIZE (mode);
1414 /* Save the arg pointer to the block. */
1415 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1416 #ifdef STACK_GROWS_DOWNWARD
1417 /* We need the pointer as the caller actually passed them to us, not
1418 as we might have pretended they were passed. Make sure it's a valid
1419 operand, as emit_move_insn isn't expected to handle a PLUS. */
1421 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1424 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1426 size = GET_MODE_SIZE (Pmode);
1428 /* Save the structure value address unless this is passed as an
1429 "invisible" first argument. */
1430 if (struct_incoming_value)
1432 emit_move_insn (adjust_address (registers, Pmode, size),
1433 copy_to_reg (struct_incoming_value));
1434 size += GET_MODE_SIZE (Pmode);
1437 /* Return the address of the block. */
1438 return copy_addr_to_reg (XEXP (registers, 0));
1441 /* __builtin_apply_args returns block of memory allocated on
1442 the stack into which is stored the arg pointer, structure
1443 value address, static chain, and all the registers that might
1444 possibly be used in performing a function call. The code is
1445 moved to the start of the function so the incoming values are
1449 expand_builtin_apply_args (void)
1451 /* Don't do __builtin_apply_args more than once in a function.
1452 Save the result of the first call and reuse it. */
1453 if (apply_args_value != 0)
1454 return apply_args_value;
1456 /* When this function is called, it means that registers must be
1457 saved on entry to this function. So we migrate the
1458 call to the first insn of this function. */
1463 temp = expand_builtin_apply_args_1 ();
1467 apply_args_value = temp;
1469 /* Put the insns after the NOTE that starts the function.
1470 If this is inside a start_sequence, make the outer-level insn
1471 chain current, so the code is placed at the start of the
1472 function. If internal_arg_pointer is a non-virtual pseudo,
1473 it needs to be placed after the function that initializes
1475 push_topmost_sequence ();
1476 if (REG_P (crtl->args.internal_arg_pointer)
1477 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1478 emit_insn_before (seq, parm_birth_insn);
1480 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1481 pop_topmost_sequence ();
1486 /* Perform an untyped call and save the state required to perform an
1487 untyped return of whatever value was returned by the given function. */
1490 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1492 int size, align, regno;
1493 enum machine_mode mode;
1494 rtx incoming_args, result, reg, dest, src, call_insn;
1495 rtx old_stack_level = 0;
1496 rtx call_fusage = 0;
1497 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1499 arguments = convert_memory_address (Pmode, arguments);
1501 /* Create a block where the return registers can be saved. */
1502 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1504 /* Fetch the arg pointer from the ARGUMENTS block. */
1505 incoming_args = gen_reg_rtx (Pmode);
1506 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1507 #ifndef STACK_GROWS_DOWNWARD
1508 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1509 incoming_args, 0, OPTAB_LIB_WIDEN);
1512 /* Push a new argument block and copy the arguments. Do not allow
1513 the (potential) memcpy call below to interfere with our stack
1515 do_pending_stack_adjust ();
1518 /* Save the stack with nonlocal if available. */
1519 #ifdef HAVE_save_stack_nonlocal
1520 if (HAVE_save_stack_nonlocal)
1521 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1524 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1526 /* Allocate a block of memory onto the stack and copy the memory
1527 arguments to the outgoing arguments address. */
1528 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1530 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1531 may have already set current_function_calls_alloca to true.
1532 current_function_calls_alloca won't be set if argsize is zero,
1533 so we have to guarantee need_drap is true here. */
1534 if (SUPPORTS_STACK_ALIGNMENT)
1535 crtl->need_drap = true;
1537 dest = virtual_outgoing_args_rtx;
1538 #ifndef STACK_GROWS_DOWNWARD
1539 if (CONST_INT_P (argsize))
1540 dest = plus_constant (dest, -INTVAL (argsize));
1542 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1544 dest = gen_rtx_MEM (BLKmode, dest);
1545 set_mem_align (dest, PARM_BOUNDARY);
1546 src = gen_rtx_MEM (BLKmode, incoming_args);
1547 set_mem_align (src, PARM_BOUNDARY);
1548 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1550 /* Refer to the argument block. */
1552 arguments = gen_rtx_MEM (BLKmode, arguments);
1553 set_mem_align (arguments, PARM_BOUNDARY);
1555 /* Walk past the arg-pointer and structure value address. */
1556 size = GET_MODE_SIZE (Pmode);
1558 size += GET_MODE_SIZE (Pmode);
1560 /* Restore each of the registers previously saved. Make USE insns
1561 for each of these registers for use in making the call. */
1562 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1563 if ((mode = apply_args_mode[regno]) != VOIDmode)
1565 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1566 if (size % align != 0)
1567 size = CEIL (size, align) * align;
1568 reg = gen_rtx_REG (mode, regno);
1569 emit_move_insn (reg, adjust_address (arguments, mode, size));
1570 use_reg (&call_fusage, reg);
1571 size += GET_MODE_SIZE (mode);
1574 /* Restore the structure value address unless this is passed as an
1575 "invisible" first argument. */
1576 size = GET_MODE_SIZE (Pmode);
1579 rtx value = gen_reg_rtx (Pmode);
1580 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1581 emit_move_insn (struct_value, value);
1582 if (REG_P (struct_value))
1583 use_reg (&call_fusage, struct_value);
1584 size += GET_MODE_SIZE (Pmode);
1587 /* All arguments and registers used for the call are set up by now! */
1588 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1590 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1591 and we don't want to load it into a register as an optimization,
1592 because prepare_call_address already did it if it should be done. */
1593 if (GET_CODE (function) != SYMBOL_REF)
1594 function = memory_address (FUNCTION_MODE, function);
1596 /* Generate the actual call instruction and save the return value. */
1597 #ifdef HAVE_untyped_call
1598 if (HAVE_untyped_call)
1599 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1600 result, result_vector (1, result)));
1603 #ifdef HAVE_call_value
1604 if (HAVE_call_value)
1608 /* Locate the unique return register. It is not possible to
1609 express a call that sets more than one return register using
1610 call_value; use untyped_call for that. In fact, untyped_call
1611 only needs to save the return registers in the given block. */
1612 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1613 if ((mode = apply_result_mode[regno]) != VOIDmode)
1615 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1617 valreg = gen_rtx_REG (mode, regno);
1620 emit_call_insn (GEN_CALL_VALUE (valreg,
1621 gen_rtx_MEM (FUNCTION_MODE, function),
1622 const0_rtx, NULL_RTX, const0_rtx));
1624 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1630 /* Find the CALL insn we just emitted, and attach the register usage
1632 call_insn = last_call_insn ();
1633 add_function_usage_to (call_insn, call_fusage);
1635 /* Restore the stack. */
1636 #ifdef HAVE_save_stack_nonlocal
1637 if (HAVE_save_stack_nonlocal)
1638 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1641 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1645 /* Return the address of the result block. */
1646 result = copy_addr_to_reg (XEXP (result, 0));
1647 return convert_memory_address (ptr_mode, result);
1650 /* Perform an untyped return. */
1653 expand_builtin_return (rtx result)
1655 int size, align, regno;
1656 enum machine_mode mode;
1658 rtx call_fusage = 0;
1660 result = convert_memory_address (Pmode, result);
1662 apply_result_size ();
1663 result = gen_rtx_MEM (BLKmode, result);
1665 #ifdef HAVE_untyped_return
1666 if (HAVE_untyped_return)
1668 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1674 /* Restore the return value and note that each value is used. */
1676 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1677 if ((mode = apply_result_mode[regno]) != VOIDmode)
1679 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1680 if (size % align != 0)
1681 size = CEIL (size, align) * align;
1682 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1683 emit_move_insn (reg, adjust_address (result, mode, size));
1685 push_to_sequence (call_fusage);
1687 call_fusage = get_insns ();
1689 size += GET_MODE_SIZE (mode);
1692 /* Put the USE insns before the return. */
1693 emit_insn (call_fusage);
1695 /* Return whatever values was restored by jumping directly to the end
1697 expand_naked_return ();
1700 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1702 static enum type_class
1703 type_to_class (tree type)
1705 switch (TREE_CODE (type))
1707 case VOID_TYPE: return void_type_class;
1708 case INTEGER_TYPE: return integer_type_class;
1709 case ENUMERAL_TYPE: return enumeral_type_class;
1710 case BOOLEAN_TYPE: return boolean_type_class;
1711 case POINTER_TYPE: return pointer_type_class;
1712 case REFERENCE_TYPE: return reference_type_class;
1713 case OFFSET_TYPE: return offset_type_class;
1714 case REAL_TYPE: return real_type_class;
1715 case COMPLEX_TYPE: return complex_type_class;
1716 case FUNCTION_TYPE: return function_type_class;
1717 case METHOD_TYPE: return method_type_class;
1718 case RECORD_TYPE: return record_type_class;
1720 case QUAL_UNION_TYPE: return union_type_class;
1721 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1722 ? string_type_class : array_type_class);
1723 case LANG_TYPE: return lang_type_class;
1724 default: return no_type_class;
1728 /* Expand a call EXP to __builtin_classify_type. */
1731 expand_builtin_classify_type (tree exp)
1733 if (call_expr_nargs (exp))
1734 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1735 return GEN_INT (no_type_class);
1738 /* This helper macro, meant to be used in mathfn_built_in below,
1739 determines which among a set of three builtin math functions is
1740 appropriate for a given type mode. The `F' and `L' cases are
1741 automatically generated from the `double' case. */
1742 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1743 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1744 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1745 fcodel = BUILT_IN_MATHFN##L ; break;
1746 /* Similar to above, but appends _R after any F/L suffix. */
1747 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1748 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1749 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1750 fcodel = BUILT_IN_MATHFN##L_R ; break;
1752 /* Return mathematic function equivalent to FN but operating directly
1753 on TYPE, if available. If IMPLICIT is true find the function in
1754 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1755 can't do the conversion, return zero. */
1758 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1760 tree const *const fn_arr
1761 = implicit ? implicit_built_in_decls : built_in_decls;
1762 enum built_in_function fcode, fcodef, fcodel;
1766 CASE_MATHFN (BUILT_IN_ACOS)
1767 CASE_MATHFN (BUILT_IN_ACOSH)
1768 CASE_MATHFN (BUILT_IN_ASIN)
1769 CASE_MATHFN (BUILT_IN_ASINH)
1770 CASE_MATHFN (BUILT_IN_ATAN)
1771 CASE_MATHFN (BUILT_IN_ATAN2)
1772 CASE_MATHFN (BUILT_IN_ATANH)
1773 CASE_MATHFN (BUILT_IN_CBRT)
1774 CASE_MATHFN (BUILT_IN_CEIL)
1775 CASE_MATHFN (BUILT_IN_CEXPI)
1776 CASE_MATHFN (BUILT_IN_COPYSIGN)
1777 CASE_MATHFN (BUILT_IN_COS)
1778 CASE_MATHFN (BUILT_IN_COSH)
1779 CASE_MATHFN (BUILT_IN_DREM)
1780 CASE_MATHFN (BUILT_IN_ERF)
1781 CASE_MATHFN (BUILT_IN_ERFC)
1782 CASE_MATHFN (BUILT_IN_EXP)
1783 CASE_MATHFN (BUILT_IN_EXP10)
1784 CASE_MATHFN (BUILT_IN_EXP2)
1785 CASE_MATHFN (BUILT_IN_EXPM1)
1786 CASE_MATHFN (BUILT_IN_FABS)
1787 CASE_MATHFN (BUILT_IN_FDIM)
1788 CASE_MATHFN (BUILT_IN_FLOOR)
1789 CASE_MATHFN (BUILT_IN_FMA)
1790 CASE_MATHFN (BUILT_IN_FMAX)
1791 CASE_MATHFN (BUILT_IN_FMIN)
1792 CASE_MATHFN (BUILT_IN_FMOD)
1793 CASE_MATHFN (BUILT_IN_FREXP)
1794 CASE_MATHFN (BUILT_IN_GAMMA)
1795 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1796 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1797 CASE_MATHFN (BUILT_IN_HYPOT)
1798 CASE_MATHFN (BUILT_IN_ILOGB)
1799 CASE_MATHFN (BUILT_IN_INF)
1800 CASE_MATHFN (BUILT_IN_ISINF)
1801 CASE_MATHFN (BUILT_IN_J0)
1802 CASE_MATHFN (BUILT_IN_J1)
1803 CASE_MATHFN (BUILT_IN_JN)
1804 CASE_MATHFN (BUILT_IN_LCEIL)
1805 CASE_MATHFN (BUILT_IN_LDEXP)
1806 CASE_MATHFN (BUILT_IN_LFLOOR)
1807 CASE_MATHFN (BUILT_IN_LGAMMA)
1808 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1809 CASE_MATHFN (BUILT_IN_LLCEIL)
1810 CASE_MATHFN (BUILT_IN_LLFLOOR)
1811 CASE_MATHFN (BUILT_IN_LLRINT)
1812 CASE_MATHFN (BUILT_IN_LLROUND)
1813 CASE_MATHFN (BUILT_IN_LOG)
1814 CASE_MATHFN (BUILT_IN_LOG10)
1815 CASE_MATHFN (BUILT_IN_LOG1P)
1816 CASE_MATHFN (BUILT_IN_LOG2)
1817 CASE_MATHFN (BUILT_IN_LOGB)
1818 CASE_MATHFN (BUILT_IN_LRINT)
1819 CASE_MATHFN (BUILT_IN_LROUND)
1820 CASE_MATHFN (BUILT_IN_MODF)
1821 CASE_MATHFN (BUILT_IN_NAN)
1822 CASE_MATHFN (BUILT_IN_NANS)
1823 CASE_MATHFN (BUILT_IN_NEARBYINT)
1824 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1825 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1826 CASE_MATHFN (BUILT_IN_POW)
1827 CASE_MATHFN (BUILT_IN_POWI)
1828 CASE_MATHFN (BUILT_IN_POW10)
1829 CASE_MATHFN (BUILT_IN_REMAINDER)
1830 CASE_MATHFN (BUILT_IN_REMQUO)
1831 CASE_MATHFN (BUILT_IN_RINT)
1832 CASE_MATHFN (BUILT_IN_ROUND)
1833 CASE_MATHFN (BUILT_IN_SCALB)
1834 CASE_MATHFN (BUILT_IN_SCALBLN)
1835 CASE_MATHFN (BUILT_IN_SCALBN)
1836 CASE_MATHFN (BUILT_IN_SIGNBIT)
1837 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1838 CASE_MATHFN (BUILT_IN_SIN)
1839 CASE_MATHFN (BUILT_IN_SINCOS)
1840 CASE_MATHFN (BUILT_IN_SINH)
1841 CASE_MATHFN (BUILT_IN_SQRT)
1842 CASE_MATHFN (BUILT_IN_TAN)
1843 CASE_MATHFN (BUILT_IN_TANH)
1844 CASE_MATHFN (BUILT_IN_TGAMMA)
1845 CASE_MATHFN (BUILT_IN_TRUNC)
1846 CASE_MATHFN (BUILT_IN_Y0)
1847 CASE_MATHFN (BUILT_IN_Y1)
1848 CASE_MATHFN (BUILT_IN_YN)
1854 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1855 return fn_arr[fcode];
1856 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1857 return fn_arr[fcodef];
1858 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1859 return fn_arr[fcodel];
1864 /* Like mathfn_built_in_1(), but always use the implicit array. */
1867 mathfn_built_in (tree type, enum built_in_function fn)
1869 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1872 /* If errno must be maintained, expand the RTL to check if the result,
1873 TARGET, of a built-in function call, EXP, is NaN, and if so set
1877 expand_errno_check (tree exp, rtx target)
1879 rtx lab = gen_label_rtx ();
1881 /* Test the result; if it is NaN, set errno=EDOM because
1882 the argument was not in the domain. */
1883 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1884 NULL_RTX, NULL_RTX, lab);
1887 /* If this built-in doesn't throw an exception, set errno directly. */
1888 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1890 #ifdef GEN_ERRNO_RTX
1891 rtx errno_rtx = GEN_ERRNO_RTX;
1894 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1896 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1902 /* Make sure the library call isn't expanded as a tail call. */
1903 CALL_EXPR_TAILCALL (exp) = 0;
1905 /* We can't set errno=EDOM directly; let the library call do it.
1906 Pop the arguments right away in case the call gets deleted. */
1908 expand_call (exp, target, 0);
1913 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1914 Return NULL_RTX if a normal call should be emitted rather than expanding
1915 the function in-line. EXP is the expression that is a call to the builtin
1916 function; if convenient, the result should be placed in TARGET.
1917 SUBTARGET may be used as the target for computing one of EXP's operands. */
1920 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1922 optab builtin_optab;
1924 tree fndecl = get_callee_fndecl (exp);
1925 enum machine_mode mode;
1926 bool errno_set = false;
1929 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1932 arg = CALL_EXPR_ARG (exp, 0);
1934 switch (DECL_FUNCTION_CODE (fndecl))
1936 CASE_FLT_FN (BUILT_IN_SQRT):
1937 errno_set = ! tree_expr_nonnegative_p (arg);
1938 builtin_optab = sqrt_optab;
1940 CASE_FLT_FN (BUILT_IN_EXP):
1941 errno_set = true; builtin_optab = exp_optab; break;
1942 CASE_FLT_FN (BUILT_IN_EXP10):
1943 CASE_FLT_FN (BUILT_IN_POW10):
1944 errno_set = true; builtin_optab = exp10_optab; break;
1945 CASE_FLT_FN (BUILT_IN_EXP2):
1946 errno_set = true; builtin_optab = exp2_optab; break;
1947 CASE_FLT_FN (BUILT_IN_EXPM1):
1948 errno_set = true; builtin_optab = expm1_optab; break;
1949 CASE_FLT_FN (BUILT_IN_LOGB):
1950 errno_set = true; builtin_optab = logb_optab; break;
1951 CASE_FLT_FN (BUILT_IN_LOG):
1952 errno_set = true; builtin_optab = log_optab; break;
1953 CASE_FLT_FN (BUILT_IN_LOG10):
1954 errno_set = true; builtin_optab = log10_optab; break;
1955 CASE_FLT_FN (BUILT_IN_LOG2):
1956 errno_set = true; builtin_optab = log2_optab; break;
1957 CASE_FLT_FN (BUILT_IN_LOG1P):
1958 errno_set = true; builtin_optab = log1p_optab; break;
1959 CASE_FLT_FN (BUILT_IN_ASIN):
1960 builtin_optab = asin_optab; break;
1961 CASE_FLT_FN (BUILT_IN_ACOS):
1962 builtin_optab = acos_optab; break;
1963 CASE_FLT_FN (BUILT_IN_TAN):
1964 builtin_optab = tan_optab; break;
1965 CASE_FLT_FN (BUILT_IN_ATAN):
1966 builtin_optab = atan_optab; break;
1967 CASE_FLT_FN (BUILT_IN_FLOOR):
1968 builtin_optab = floor_optab; break;
1969 CASE_FLT_FN (BUILT_IN_CEIL):
1970 builtin_optab = ceil_optab; break;
1971 CASE_FLT_FN (BUILT_IN_TRUNC):
1972 builtin_optab = btrunc_optab; break;
1973 CASE_FLT_FN (BUILT_IN_ROUND):
1974 builtin_optab = round_optab; break;
1975 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1976 builtin_optab = nearbyint_optab;
1977 if (flag_trapping_math)
1979 /* Else fallthrough and expand as rint. */
1980 CASE_FLT_FN (BUILT_IN_RINT):
1981 builtin_optab = rint_optab; break;
1982 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1983 builtin_optab = significand_optab; break;
1988 /* Make a suitable register to place result in. */
1989 mode = TYPE_MODE (TREE_TYPE (exp));
1991 if (! flag_errno_math || ! HONOR_NANS (mode))
1994 /* Before working hard, check whether the instruction is available. */
1995 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1997 target = gen_reg_rtx (mode);
1999 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2000 need to expand the argument again. This way, we will not perform
2001 side-effects more the once. */
2002 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2004 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2008 /* Compute into TARGET.
2009 Set TARGET to wherever the result comes back. */
2010 target = expand_unop (mode, builtin_optab, op0, target, 0);
2015 expand_errno_check (exp, target);
2017 /* Output the entire sequence. */
2018 insns = get_insns ();
2024 /* If we were unable to expand via the builtin, stop the sequence
2025 (without outputting the insns) and call to the library function
2026 with the stabilized argument list. */
2030 return expand_call (exp, target, target == const0_rtx);
2033 /* Expand a call to the builtin binary math functions (pow and atan2).
2034 Return NULL_RTX if a normal call should be emitted rather than expanding the
2035 function in-line. EXP is the expression that is a call to the builtin
2036 function; if convenient, the result should be placed in TARGET.
2037 SUBTARGET may be used as the target for computing one of EXP's
2041 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2043 optab builtin_optab;
2044 rtx op0, op1, insns;
2045 int op1_type = REAL_TYPE;
2046 tree fndecl = get_callee_fndecl (exp);
2048 enum machine_mode mode;
2049 bool errno_set = true;
2051 switch (DECL_FUNCTION_CODE (fndecl))
2053 CASE_FLT_FN (BUILT_IN_SCALBN):
2054 CASE_FLT_FN (BUILT_IN_SCALBLN):
2055 CASE_FLT_FN (BUILT_IN_LDEXP):
2056 op1_type = INTEGER_TYPE;
2061 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2064 arg0 = CALL_EXPR_ARG (exp, 0);
2065 arg1 = CALL_EXPR_ARG (exp, 1);
2067 switch (DECL_FUNCTION_CODE (fndecl))
2069 CASE_FLT_FN (BUILT_IN_POW):
2070 builtin_optab = pow_optab; break;
2071 CASE_FLT_FN (BUILT_IN_ATAN2):
2072 builtin_optab = atan2_optab; break;
2073 CASE_FLT_FN (BUILT_IN_SCALB):
2074 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2076 builtin_optab = scalb_optab; break;
2077 CASE_FLT_FN (BUILT_IN_SCALBN):
2078 CASE_FLT_FN (BUILT_IN_SCALBLN):
2079 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2081 /* Fall through... */
2082 CASE_FLT_FN (BUILT_IN_LDEXP):
2083 builtin_optab = ldexp_optab; break;
2084 CASE_FLT_FN (BUILT_IN_FMOD):
2085 builtin_optab = fmod_optab; break;
2086 CASE_FLT_FN (BUILT_IN_REMAINDER):
2087 CASE_FLT_FN (BUILT_IN_DREM):
2088 builtin_optab = remainder_optab; break;
2093 /* Make a suitable register to place result in. */
2094 mode = TYPE_MODE (TREE_TYPE (exp));
2096 /* Before working hard, check whether the instruction is available. */
2097 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2100 target = gen_reg_rtx (mode);
2102 if (! flag_errno_math || ! HONOR_NANS (mode))
2105 /* Always stabilize the argument list. */
2106 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2107 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2109 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2110 op1 = expand_normal (arg1);
2114 /* Compute into TARGET.
2115 Set TARGET to wherever the result comes back. */
2116 target = expand_binop (mode, builtin_optab, op0, op1,
2117 target, 0, OPTAB_DIRECT);
2119 /* If we were unable to expand via the builtin, stop the sequence
2120 (without outputting the insns) and call to the library function
2121 with the stabilized argument list. */
2125 return expand_call (exp, target, target == const0_rtx);
2129 expand_errno_check (exp, target);
2131 /* Output the entire sequence. */
2132 insns = get_insns ();
2139 /* Expand a call to the builtin sin and cos math functions.
2140 Return NULL_RTX if a normal call should be emitted rather than expanding the
2141 function in-line. EXP is the expression that is a call to the builtin
2142 function; if convenient, the result should be placed in TARGET.
2143 SUBTARGET may be used as the target for computing one of EXP's
2147 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2149 optab builtin_optab;
2151 tree fndecl = get_callee_fndecl (exp);
2152 enum machine_mode mode;
2155 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2158 arg = CALL_EXPR_ARG (exp, 0);
2160 switch (DECL_FUNCTION_CODE (fndecl))
2162 CASE_FLT_FN (BUILT_IN_SIN):
2163 CASE_FLT_FN (BUILT_IN_COS):
2164 builtin_optab = sincos_optab; break;
2169 /* Make a suitable register to place result in. */
2170 mode = TYPE_MODE (TREE_TYPE (exp));
2172 /* Check if sincos insn is available, otherwise fallback
2173 to sin or cos insn. */
2174 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2175 switch (DECL_FUNCTION_CODE (fndecl))
2177 CASE_FLT_FN (BUILT_IN_SIN):
2178 builtin_optab = sin_optab; break;
2179 CASE_FLT_FN (BUILT_IN_COS):
2180 builtin_optab = cos_optab; break;
2185 /* Before working hard, check whether the instruction is available. */
2186 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2188 target = gen_reg_rtx (mode);
2190 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2191 need to expand the argument again. This way, we will not perform
2192 side-effects more the once. */
2193 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2195 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2199 /* Compute into TARGET.
2200 Set TARGET to wherever the result comes back. */
2201 if (builtin_optab == sincos_optab)
2205 switch (DECL_FUNCTION_CODE (fndecl))
2207 CASE_FLT_FN (BUILT_IN_SIN):
2208 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2210 CASE_FLT_FN (BUILT_IN_COS):
2211 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2216 gcc_assert (result);
2220 target = expand_unop (mode, builtin_optab, op0, target, 0);
2225 /* Output the entire sequence. */
2226 insns = get_insns ();
2232 /* If we were unable to expand via the builtin, stop the sequence
2233 (without outputting the insns) and call to the library function
2234 with the stabilized argument list. */
2238 target = expand_call (exp, target, target == const0_rtx);
2243 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2244 return an RTL instruction code that implements the functionality.
2245 If that isn't possible or available return CODE_FOR_nothing. */
2247 static enum insn_code
2248 interclass_mathfn_icode (tree arg, tree fndecl)
2250 bool errno_set = false;
2251 optab builtin_optab = 0;
2252 enum machine_mode mode;
2254 switch (DECL_FUNCTION_CODE (fndecl))
2256 CASE_FLT_FN (BUILT_IN_ILOGB):
2257 errno_set = true; builtin_optab = ilogb_optab; break;
2258 CASE_FLT_FN (BUILT_IN_ISINF):
2259 builtin_optab = isinf_optab; break;
2260 case BUILT_IN_ISNORMAL:
2261 case BUILT_IN_ISFINITE:
2262 CASE_FLT_FN (BUILT_IN_FINITE):
2263 case BUILT_IN_FINITED32:
2264 case BUILT_IN_FINITED64:
2265 case BUILT_IN_FINITED128:
2266 case BUILT_IN_ISINFD32:
2267 case BUILT_IN_ISINFD64:
2268 case BUILT_IN_ISINFD128:
2269 /* These builtins have no optabs (yet). */
2275 /* There's no easy way to detect the case we need to set EDOM. */
2276 if (flag_errno_math && errno_set)
2277 return CODE_FOR_nothing;
2279 /* Optab mode depends on the mode of the input argument. */
2280 mode = TYPE_MODE (TREE_TYPE (arg));
2283 return optab_handler (builtin_optab, mode)->insn_code;
2284 return CODE_FOR_nothing;
2287 /* Expand a call to one of the builtin math functions that operate on
2288 floating point argument and output an integer result (ilogb, isinf,
2290 Return 0 if a normal call should be emitted rather than expanding the
2291 function in-line. EXP is the expression that is a call to the builtin
2292 function; if convenient, the result should be placed in TARGET.
2293 SUBTARGET may be used as the target for computing one of EXP's operands. */
2296 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2298 enum insn_code icode = CODE_FOR_nothing;
2300 tree fndecl = get_callee_fndecl (exp);
2301 enum machine_mode mode;
2304 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2307 arg = CALL_EXPR_ARG (exp, 0);
2308 icode = interclass_mathfn_icode (arg, fndecl);
2309 mode = TYPE_MODE (TREE_TYPE (arg));
2311 if (icode != CODE_FOR_nothing)
2313 /* Make a suitable register to place result in. */
2315 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2316 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2318 gcc_assert (insn_data[icode].operand[0].predicate
2319 (target, GET_MODE (target)));
2321 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2322 need to expand the argument again. This way, we will not perform
2323 side-effects more the once. */
2324 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2326 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2328 if (mode != GET_MODE (op0))
2329 op0 = convert_to_mode (mode, op0, 0);
2331 /* Compute into TARGET.
2332 Set TARGET to wherever the result comes back. */
2333 emit_unop_insn (icode, target, op0, UNKNOWN);
2340 /* Expand a call to the builtin sincos math function.
2341 Return NULL_RTX if a normal call should be emitted rather than expanding the
2342 function in-line. EXP is the expression that is a call to the builtin
2346 expand_builtin_sincos (tree exp)
2348 rtx op0, op1, op2, target1, target2;
2349 enum machine_mode mode;
2350 tree arg, sinp, cosp;
2352 location_t loc = EXPR_LOCATION (exp);
2354 if (!validate_arglist (exp, REAL_TYPE,
2355 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2358 arg = CALL_EXPR_ARG (exp, 0);
2359 sinp = CALL_EXPR_ARG (exp, 1);
2360 cosp = CALL_EXPR_ARG (exp, 2);
2362 /* Make a suitable register to place result in. */
2363 mode = TYPE_MODE (TREE_TYPE (arg));
2365 /* Check if sincos insn is available, otherwise emit the call. */
2366 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2369 target1 = gen_reg_rtx (mode);
2370 target2 = gen_reg_rtx (mode);
2372 op0 = expand_normal (arg);
2373 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2374 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2376 /* Compute into target1 and target2.
2377 Set TARGET to wherever the result comes back. */
2378 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2379 gcc_assert (result);
2381 /* Move target1 and target2 to the memory locations indicated
2383 emit_move_insn (op1, target1);
2384 emit_move_insn (op2, target2);
2389 /* Expand a call to the internal cexpi builtin to the sincos math function.
2390 EXP is the expression that is a call to the builtin function; if convenient,
2391 the result should be placed in TARGET. SUBTARGET may be used as the target
2392 for computing one of EXP's operands. */
2395 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2397 tree fndecl = get_callee_fndecl (exp);
2399 enum machine_mode mode;
2401 location_t loc = EXPR_LOCATION (exp);
2403 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2406 arg = CALL_EXPR_ARG (exp, 0);
2407 type = TREE_TYPE (arg);
2408 mode = TYPE_MODE (TREE_TYPE (arg));
2410 /* Try expanding via a sincos optab, fall back to emitting a libcall
2411 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2412 is only generated from sincos, cexp or if we have either of them. */
2413 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2415 op1 = gen_reg_rtx (mode);
2416 op2 = gen_reg_rtx (mode);
2418 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2420 /* Compute into op1 and op2. */
2421 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2423 else if (TARGET_HAS_SINCOS)
2425 tree call, fn = NULL_TREE;
2429 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2430 fn = built_in_decls[BUILT_IN_SINCOSF];
2431 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2432 fn = built_in_decls[BUILT_IN_SINCOS];
2433 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2434 fn = built_in_decls[BUILT_IN_SINCOSL];
2438 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2439 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2440 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2441 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2442 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2443 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2445 /* Make sure not to fold the sincos call again. */
2446 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2447 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2448 call, 3, arg, top1, top2));
2452 tree call, fn = NULL_TREE, narg;
2453 tree ctype = build_complex_type (type);
2455 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2456 fn = built_in_decls[BUILT_IN_CEXPF];
2457 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2458 fn = built_in_decls[BUILT_IN_CEXP];
2459 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2460 fn = built_in_decls[BUILT_IN_CEXPL];
2464 /* If we don't have a decl for cexp create one. This is the
2465 friendliest fallback if the user calls __builtin_cexpi
2466 without full target C99 function support. */
2467 if (fn == NULL_TREE)
2470 const char *name = NULL;
2472 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2474 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2476 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2479 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2480 fn = build_fn_decl (name, fntype);
2483 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2484 build_real (type, dconst0), arg);
2486 /* Make sure not to fold the cexp call again. */
2487 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2488 return expand_expr (build_call_nary (ctype, call, 1, narg),
2489 target, VOIDmode, EXPAND_NORMAL);
2492 /* Now build the proper return type. */
2493 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2494 make_tree (TREE_TYPE (arg), op2),
2495 make_tree (TREE_TYPE (arg), op1)),
2496 target, VOIDmode, EXPAND_NORMAL);
2499 /* Conveniently construct a function call expression. FNDECL names the
2500 function to be called, N is the number of arguments, and the "..."
2501 parameters are the argument expressions. Unlike build_call_exr
2502 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2505 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2508 tree fntype = TREE_TYPE (fndecl);
2509 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2512 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2514 SET_EXPR_LOCATION (fn, loc);
2517 #define build_call_nofold(...) \
2518 build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
2520 /* Expand a call to one of the builtin rounding functions gcc defines
2521 as an extension (lfloor and lceil). As these are gcc extensions we
2522 do not need to worry about setting errno to EDOM.
2523 If expanding via optab fails, lower expression to (int)(floor(x)).
2524 EXP is the expression that is a call to the builtin function;
2525 if convenient, the result should be placed in TARGET. */
2528 expand_builtin_int_roundingfn (tree exp, rtx target)
2530 convert_optab builtin_optab;
2531 rtx op0, insns, tmp;
2532 tree fndecl = get_callee_fndecl (exp);
2533 enum built_in_function fallback_fn;
2534 tree fallback_fndecl;
2535 enum machine_mode mode;
2538 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2541 arg = CALL_EXPR_ARG (exp, 0);
2543 switch (DECL_FUNCTION_CODE (fndecl))
2545 CASE_FLT_FN (BUILT_IN_LCEIL):
2546 CASE_FLT_FN (BUILT_IN_LLCEIL):
2547 builtin_optab = lceil_optab;
2548 fallback_fn = BUILT_IN_CEIL;
2551 CASE_FLT_FN (BUILT_IN_LFLOOR):
2552 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2553 builtin_optab = lfloor_optab;
2554 fallback_fn = BUILT_IN_FLOOR;
2561 /* Make a suitable register to place result in. */
2562 mode = TYPE_MODE (TREE_TYPE (exp));
2564 target = gen_reg_rtx (mode);
2566 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2567 need to expand the argument again. This way, we will not perform
2568 side-effects more the once. */
2569 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2571 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2575 /* Compute into TARGET. */
2576 if (expand_sfix_optab (target, op0, builtin_optab))
2578 /* Output the entire sequence. */
2579 insns = get_insns ();
2585 /* If we were unable to expand via the builtin, stop the sequence
2586 (without outputting the insns). */
2589 /* Fall back to floating point rounding optab. */
2590 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2592 /* For non-C99 targets we may end up without a fallback fndecl here
2593 if the user called __builtin_lfloor directly. In this case emit
2594 a call to the floor/ceil variants nevertheless. This should result
2595 in the best user experience for not full C99 targets. */
2596 if (fallback_fndecl == NULL_TREE)
2599 const char *name = NULL;
2601 switch (DECL_FUNCTION_CODE (fndecl))
2603 case BUILT_IN_LCEIL:
2604 case BUILT_IN_LLCEIL:
2607 case BUILT_IN_LCEILF:
2608 case BUILT_IN_LLCEILF:
2611 case BUILT_IN_LCEILL:
2612 case BUILT_IN_LLCEILL:
2615 case BUILT_IN_LFLOOR:
2616 case BUILT_IN_LLFLOOR:
2619 case BUILT_IN_LFLOORF:
2620 case BUILT_IN_LLFLOORF:
2623 case BUILT_IN_LFLOORL:
2624 case BUILT_IN_LLFLOORL:
2631 fntype = build_function_type_list (TREE_TYPE (arg),
2632 TREE_TYPE (arg), NULL_TREE);
2633 fallback_fndecl = build_fn_decl (name, fntype);
2636 exp = build_call_nofold (fallback_fndecl, 1, arg);
2638 tmp = expand_normal (exp);
2640 /* Truncate the result of floating point optab to integer
2641 via expand_fix (). */
2642 target = gen_reg_rtx (mode);
2643 expand_fix (target, tmp, 0);
2648 /* Expand a call to one of the builtin math functions doing integer
2650 Return 0 if a normal call should be emitted rather than expanding the
2651 function in-line. EXP is the expression that is a call to the builtin
2652 function; if convenient, the result should be placed in TARGET. */
2655 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2657 convert_optab builtin_optab;
2659 tree fndecl = get_callee_fndecl (exp);
2661 enum machine_mode mode;
2663 /* There's no easy way to detect the case we need to set EDOM. */
2664 if (flag_errno_math)
2667 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2670 arg = CALL_EXPR_ARG (exp, 0);
2672 switch (DECL_FUNCTION_CODE (fndecl))
2674 CASE_FLT_FN (BUILT_IN_LRINT):
2675 CASE_FLT_FN (BUILT_IN_LLRINT):
2676 builtin_optab = lrint_optab; break;
2677 CASE_FLT_FN (BUILT_IN_LROUND):
2678 CASE_FLT_FN (BUILT_IN_LLROUND):
2679 builtin_optab = lround_optab; break;
2684 /* Make a suitable register to place result in. */
2685 mode = TYPE_MODE (TREE_TYPE (exp));
2687 target = gen_reg_rtx (mode);
2689 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2690 need to expand the argument again. This way, we will not perform
2691 side-effects more the once. */
2692 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2694 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2698 if (expand_sfix_optab (target, op0, builtin_optab))
2700 /* Output the entire sequence. */
2701 insns = get_insns ();
2707 /* If we were unable to expand via the builtin, stop the sequence
2708 (without outputting the insns) and call to the library function
2709 with the stabilized argument list. */
2712 target = expand_call (exp, target, target == const0_rtx);
2717 /* To evaluate powi(x,n), the floating point value x raised to the
2718 constant integer exponent n, we use a hybrid algorithm that
2719 combines the "window method" with look-up tables. For an
2720 introduction to exponentiation algorithms and "addition chains",
2721 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2722 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2723 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2724 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2726 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2727 multiplications to inline before calling the system library's pow
2728 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2729 so this default never requires calling pow, powf or powl. */
2731 #ifndef POWI_MAX_MULTS
2732 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2735 /* The size of the "optimal power tree" lookup table. All
2736 exponents less than this value are simply looked up in the
2737 powi_table below. This threshold is also used to size the
2738 cache of pseudo registers that hold intermediate results. */
2739 #define POWI_TABLE_SIZE 256
2741 /* The size, in bits of the window, used in the "window method"
2742 exponentiation algorithm. This is equivalent to a radix of
2743 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2744 #define POWI_WINDOW_SIZE 3
2746 /* The following table is an efficient representation of an
2747 "optimal power tree". For each value, i, the corresponding
2748 value, j, in the table states than an optimal evaluation
2749 sequence for calculating pow(x,i) can be found by evaluating
2750 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2751 100 integers is given in Knuth's "Seminumerical algorithms". */
2753 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2755 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2756 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2757 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2758 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2759 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2760 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2761 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2762 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2763 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2764 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2765 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2766 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2767 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2768 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2769 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2770 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2771 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2772 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2773 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2774 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2775 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2776 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2777 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2778 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2779 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2780 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2781 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2782 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2783 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2784 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2785 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2786 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2790 /* Return the number of multiplications required to calculate
2791 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2792 subroutine of powi_cost. CACHE is an array indicating
2793 which exponents have already been calculated. */
2796 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2798 /* If we've already calculated this exponent, then this evaluation
2799 doesn't require any additional multiplications. */
2804 return powi_lookup_cost (n - powi_table[n], cache)
2805 + powi_lookup_cost (powi_table[n], cache) + 1;
2808 /* Return the number of multiplications required to calculate
2809 powi(x,n) for an arbitrary x, given the exponent N. This
2810 function needs to be kept in sync with expand_powi below. */
2813 powi_cost (HOST_WIDE_INT n)
2815 bool cache[POWI_TABLE_SIZE];
2816 unsigned HOST_WIDE_INT digit;
2817 unsigned HOST_WIDE_INT val;
2823 /* Ignore the reciprocal when calculating the cost. */
2824 val = (n < 0) ? -n : n;
2826 /* Initialize the exponent cache. */
2827 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2832 while (val >= POWI_TABLE_SIZE)
2836 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2837 result += powi_lookup_cost (digit, cache)
2838 + POWI_WINDOW_SIZE + 1;
2839 val >>= POWI_WINDOW_SIZE;
2848 return result + powi_lookup_cost (val, cache);
2851 /* Recursive subroutine of expand_powi. This function takes the array,
2852 CACHE, of already calculated exponents and an exponent N and returns
2853 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2856 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2858 unsigned HOST_WIDE_INT digit;
2862 if (n < POWI_TABLE_SIZE)
2867 target = gen_reg_rtx (mode);
2870 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2871 op1 = expand_powi_1 (mode, powi_table[n], cache);
2875 target = gen_reg_rtx (mode);
2876 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2877 op0 = expand_powi_1 (mode, n - digit, cache);
2878 op1 = expand_powi_1 (mode, digit, cache);
2882 target = gen_reg_rtx (mode);
2883 op0 = expand_powi_1 (mode, n >> 1, cache);
2887 result = expand_mult (mode, op0, op1, target, 0);
2888 if (result != target)
2889 emit_move_insn (target, result);
2893 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2894 floating point operand in mode MODE, and N is the exponent. This
2895 function needs to be kept in sync with powi_cost above. */
2898 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2900 rtx cache[POWI_TABLE_SIZE];
2904 return CONST1_RTX (mode);
2906 memset (cache, 0, sizeof (cache));
2909 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2911 /* If the original exponent was negative, reciprocate the result. */
2913 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2914 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2919 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2920 a normal call should be emitted rather than expanding the function
2921 in-line. EXP is the expression that is a call to the builtin
2922 function; if convenient, the result should be placed in TARGET. */
2925 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2929 tree type = TREE_TYPE (exp);
2930 REAL_VALUE_TYPE cint, c, c2;
2933 enum machine_mode mode = TYPE_MODE (type);
2935 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2938 arg0 = CALL_EXPR_ARG (exp, 0);
2939 arg1 = CALL_EXPR_ARG (exp, 1);
2941 if (TREE_CODE (arg1) != REAL_CST
2942 || TREE_OVERFLOW (arg1))
2943 return expand_builtin_mathfn_2 (exp, target, subtarget);
2945 /* Handle constant exponents. */
2947 /* For integer valued exponents we can expand to an optimal multiplication
2948 sequence using expand_powi. */
2949 c = TREE_REAL_CST (arg1);
2950 n = real_to_integer (&c);
2951 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2952 if (real_identical (&c, &cint)
2953 && ((n >= -1 && n <= 2)
2954 || (flag_unsafe_math_optimizations
2955 && optimize_insn_for_speed_p ()
2956 && powi_cost (n) <= POWI_MAX_MULTS)))
2958 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2961 op = force_reg (mode, op);
2962 op = expand_powi (op, mode, n);
2967 narg0 = builtin_save_expr (arg0);
2969 /* If the exponent is not integer valued, check if it is half of an integer.
2970 In this case we can expand to sqrt (x) * x**(n/2). */
2971 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2972 if (fn != NULL_TREE)
2974 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2975 n = real_to_integer (&c2);
2976 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2977 if (real_identical (&c2, &cint)
2978 && ((flag_unsafe_math_optimizations
2979 && optimize_insn_for_speed_p ()
2980 && powi_cost (n/2) <= POWI_MAX_MULTS)
2983 tree call_expr = build_call_nofold (fn, 1, narg0);
2984 /* Use expand_expr in case the newly built call expression
2985 was folded to a non-call. */
2986 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2989 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2990 op2 = force_reg (mode, op2);
2991 op2 = expand_powi (op2, mode, abs (n / 2));
2992 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2993 0, OPTAB_LIB_WIDEN);
2994 /* If the original exponent was negative, reciprocate the
2997 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2998 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3004 /* Try if the exponent is a third of an integer. In this case
3005 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3006 different from pow (x, 1./3.) due to rounding and behavior
3007 with negative x we need to constrain this transformation to
3008 unsafe math and positive x or finite math. */
3009 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3011 && flag_unsafe_math_optimizations
3012 && (tree_expr_nonnegative_p (arg0)
3013 || !HONOR_NANS (mode)))
3015 REAL_VALUE_TYPE dconst3;
3016 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3017 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3018 real_round (&c2, mode, &c2);
3019 n = real_to_integer (&c2);
3020 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3021 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3022 real_convert (&c2, mode, &c2);
3023 if (real_identical (&c2, &c)
3024 && ((optimize_insn_for_speed_p ()
3025 && powi_cost (n/3) <= POWI_MAX_MULTS)
3028 tree call_expr = build_call_nofold (fn, 1,narg0);
3029 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3030 if (abs (n) % 3 == 2)
3031 op = expand_simple_binop (mode, MULT, op, op, op,
3032 0, OPTAB_LIB_WIDEN);
3035 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3036 op2 = force_reg (mode, op2);
3037 op2 = expand_powi (op2, mode, abs (n / 3));
3038 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3039 0, OPTAB_LIB_WIDEN);
3040 /* If the original exponent was negative, reciprocate the
3043 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3044 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3050 /* Fall back to optab expansion. */
3051 return expand_builtin_mathfn_2 (exp, target, subtarget);
3054 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3055 a normal call should be emitted rather than expanding the function
3056 in-line. EXP is the expression that is a call to the builtin
3057 function; if convenient, the result should be placed in TARGET. */
3060 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3064 enum machine_mode mode;
3065 enum machine_mode mode2;
3067 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3070 arg0 = CALL_EXPR_ARG (exp, 0);
3071 arg1 = CALL_EXPR_ARG (exp, 1);
3072 mode = TYPE_MODE (TREE_TYPE (exp));
3074 /* Handle constant power. */
3076 if (TREE_CODE (arg1) == INTEGER_CST
3077 && !TREE_OVERFLOW (arg1))
3079 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3081 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3082 Otherwise, check the number of multiplications required. */
3083 if ((TREE_INT_CST_HIGH (arg1) == 0
3084 || TREE_INT_CST_HIGH (arg1) == -1)
3085 && ((n >= -1 && n <= 2)
3086 || (optimize_insn_for_speed_p ()
3087 && powi_cost (n) <= POWI_MAX_MULTS)))
3089 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3090 op0 = force_reg (mode, op0);
3091 return expand_powi (op0, mode, n);
3095 /* Emit a libcall to libgcc. */
3097 /* Mode of the 2nd argument must match that of an int. */
3098 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3100 if (target == NULL_RTX)
3101 target = gen_reg_rtx (mode);
3103 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3104 if (GET_MODE (op0) != mode)
3105 op0 = convert_to_mode (mode, op0, 0);
3106 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3107 if (GET_MODE (op1) != mode2)
3108 op1 = convert_to_mode (mode2, op1, 0);
3110 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3111 target, LCT_CONST, mode, 2,
3112 op0, mode, op1, mode2);
3117 /* Expand expression EXP which is a call to the strlen builtin. Return
3118 NULL_RTX if we failed the caller should emit a normal call, otherwise
3119 try to get the result in TARGET, if convenient. */
3122 expand_builtin_strlen (tree exp, rtx target,
3123 enum machine_mode target_mode)
3125 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3131 tree src = CALL_EXPR_ARG (exp, 0);
3132 rtx result, src_reg, char_rtx, before_strlen;
3133 enum machine_mode insn_mode = target_mode, char_mode;
3134 enum insn_code icode = CODE_FOR_nothing;
3137 /* If the length can be computed at compile-time, return it. */
3138 len = c_strlen (src, 0);
3140 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3142 /* If the length can be computed at compile-time and is constant
3143 integer, but there are side-effects in src, evaluate
3144 src for side-effects, then return len.
3145 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3146 can be optimized into: i++; x = 3; */
3147 len = c_strlen (src, 1);
3148 if (len && TREE_CODE (len) == INTEGER_CST)
3150 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3151 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3154 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3156 /* If SRC is not a pointer type, don't do this operation inline. */
3160 /* Bail out if we can't compute strlen in the right mode. */
3161 while (insn_mode != VOIDmode)
3163 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3164 if (icode != CODE_FOR_nothing)
3167 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3169 if (insn_mode == VOIDmode)
3172 /* Make a place to write the result of the instruction. */
3176 && GET_MODE (result) == insn_mode
3177 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3178 result = gen_reg_rtx (insn_mode);
3180 /* Make a place to hold the source address. We will not expand
3181 the actual source until we are sure that the expansion will
3182 not fail -- there are trees that cannot be expanded twice. */
3183 src_reg = gen_reg_rtx (Pmode);
3185 /* Mark the beginning of the strlen sequence so we can emit the
3186 source operand later. */
3187 before_strlen = get_last_insn ();
3189 char_rtx = const0_rtx;
3190 char_mode = insn_data[(int) icode].operand[2].mode;
3191 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3193 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3195 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3196 char_rtx, GEN_INT (align));
3201 /* Now that we are assured of success, expand the source. */
3203 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3205 emit_move_insn (src_reg, pat);
3210 emit_insn_after (pat, before_strlen);
3212 emit_insn_before (pat, get_insns ());
3214 /* Return the value in the proper mode for this function. */
3215 if (GET_MODE (result) == target_mode)
3217 else if (target != 0)
3218 convert_move (target, result, 0);
3220 target = convert_to_mode (target_mode, result, 0);
3226 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3227 bytes from constant string DATA + OFFSET and return it as target
3231 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3232 enum machine_mode mode)
3234 const char *str = (const char *) data;
3236 gcc_assert (offset >= 0
3237 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3238 <= strlen (str) + 1));
3240 return c_readstr (str + offset, mode);
3243 /* Expand a call EXP to the memcpy builtin.
3244 Return NULL_RTX if we failed, the caller should emit a normal call,
3245 otherwise try to get the result in TARGET, if convenient (and in
3246 mode MODE if that's convenient). */
3249 expand_builtin_memcpy (tree exp, rtx target)
3251 if (!validate_arglist (exp,
3252 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3256 tree dest = CALL_EXPR_ARG (exp, 0);
3257 tree src = CALL_EXPR_ARG (exp, 1);
3258 tree len = CALL_EXPR_ARG (exp, 2);
3259 const char *src_str;
3260 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3261 unsigned int dest_align
3262 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3263 rtx dest_mem, src_mem, dest_addr, len_rtx;
3264 HOST_WIDE_INT expected_size = -1;
3265 unsigned int expected_align = 0;
3267 /* If DEST is not a pointer type, call the normal function. */
3268 if (dest_align == 0)
3271 /* If either SRC is not a pointer type, don't do this
3272 operation in-line. */
3276 if (currently_expanding_gimple_stmt)
3277 stringop_block_profile (currently_expanding_gimple_stmt,
3278 &expected_align, &expected_size);
3280 if (expected_align < dest_align)
3281 expected_align = dest_align;
3282 dest_mem = get_memory_rtx (dest, len);
3283 set_mem_align (dest_mem, dest_align);
3284 len_rtx = expand_normal (len);
3285 src_str = c_getstr (src);
3287 /* If SRC is a string constant and block move would be done
3288 by pieces, we can avoid loading the string from memory
3289 and only stored the computed constants. */
3291 && CONST_INT_P (len_rtx)
3292 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3293 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3294 CONST_CAST (char *, src_str),
3297 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3298 builtin_memcpy_read_str,
3299 CONST_CAST (char *, src_str),
3300 dest_align, false, 0);
3301 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3302 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3306 src_mem = get_memory_rtx (src, len);
3307 set_mem_align (src_mem, src_align);
3309 /* Copy word part most expediently. */
3310 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3311 CALL_EXPR_TAILCALL (exp)
3312 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3313 expected_align, expected_size);
3317 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3318 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3324 /* Expand a call EXP to the mempcpy builtin.
3325 Return NULL_RTX if we failed; the caller should emit a normal call,
3326 otherwise try to get the result in TARGET, if convenient (and in
3327 mode MODE if that's convenient). If ENDP is 0 return the
3328 destination pointer, if ENDP is 1 return the end pointer ala
3329 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3333 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3335 if (!validate_arglist (exp,
3336 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3340 tree dest = CALL_EXPR_ARG (exp, 0);
3341 tree src = CALL_EXPR_ARG (exp, 1);
3342 tree len = CALL_EXPR_ARG (exp, 2);
3343 return expand_builtin_mempcpy_args (dest, src, len,
3344 target, mode, /*endp=*/ 1);
3348 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3349 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3350 so that this can also be called without constructing an actual CALL_EXPR.
3351 The other arguments and return value are the same as for
3352 expand_builtin_mempcpy. */
3355 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3356 rtx target, enum machine_mode mode, int endp)
3358 /* If return value is ignored, transform mempcpy into memcpy. */
3359 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3361 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3362 tree result = build_call_nofold (fn, 3, dest, src, len);
3363 return expand_expr (result, target, mode, EXPAND_NORMAL);
3367 const char *src_str;
3368 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3369 unsigned int dest_align
3370 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3371 rtx dest_mem, src_mem, len_rtx;
3373 /* If either SRC or DEST is not a pointer type, don't do this
3374 operation in-line. */
3375 if (dest_align == 0 || src_align == 0)
3378 /* If LEN is not constant, call the normal function. */
3379 if (! host_integerp (len, 1))
3382 len_rtx = expand_normal (len);
3383 src_str = c_getstr (src);
3385 /* If SRC is a string constant and block move would be done
3386 by pieces, we can avoid loading the string from memory
3387 and only stored the computed constants. */
3389 && CONST_INT_P (len_rtx)
3390 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3391 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3392 CONST_CAST (char *, src_str),
3395 dest_mem = get_memory_rtx (dest, len);
3396 set_mem_align (dest_mem, dest_align);
3397 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3398 builtin_memcpy_read_str,
3399 CONST_CAST (char *, src_str),
3400 dest_align, false, endp);
3401 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3402 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3406 if (CONST_INT_P (len_rtx)
3407 && can_move_by_pieces (INTVAL (len_rtx),
3408 MIN (dest_align, src_align)))
3410 dest_mem = get_memory_rtx (dest, len);
3411 set_mem_align (dest_mem, dest_align);
3412 src_mem = get_memory_rtx (src, len);
3413 set_mem_align (src_mem, src_align);
3414 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3415 MIN (dest_align, src_align), endp);
3416 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3417 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3426 # define HAVE_movstr 0
3427 # define CODE_FOR_movstr CODE_FOR_nothing
3430 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3431 we failed, the caller should emit a normal call, otherwise try to
3432 get the result in TARGET, if convenient. If ENDP is 0 return the
3433 destination pointer, if ENDP is 1 return the end pointer ala
3434 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3438 expand_movstr (tree dest, tree src, rtx target, int endp)
3444 const struct insn_data * data;
3449 dest_mem = get_memory_rtx (dest, NULL);
3450 src_mem = get_memory_rtx (src, NULL);
3453 target = force_reg (Pmode, XEXP (dest_mem, 0));
3454 dest_mem = replace_equiv_address (dest_mem, target);
3455 end = gen_reg_rtx (Pmode);
3459 if (target == 0 || target == const0_rtx)
3461 end = gen_reg_rtx (Pmode);
3469 data = insn_data + CODE_FOR_movstr;
3471 if (data->operand[0].mode != VOIDmode)
3472 end = gen_lowpart (data->operand[0].mode, end);
3474 insn = data->genfun (end, dest_mem, src_mem);
3480 /* movstr is supposed to set end to the address of the NUL
3481 terminator. If the caller requested a mempcpy-like return value,
3483 if (endp == 1 && target != const0_rtx)
3485 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3486 emit_move_insn (target, force_operand (tem, NULL_RTX));
3492 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3493 NULL_RTX if we failed the caller should emit a normal call, otherwise
3494 try to get the result in TARGET, if convenient (and in mode MODE if that's
3498 expand_builtin_strcpy (tree exp, rtx target)
3500 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3502 tree dest = CALL_EXPR_ARG (exp, 0);
3503 tree src = CALL_EXPR_ARG (exp, 1);
3504 return expand_builtin_strcpy_args (dest, src, target);
3509 /* Helper function to do the actual work for expand_builtin_strcpy. The
3510 arguments to the builtin_strcpy call DEST and SRC are broken out
3511 so that this can also be called without constructing an actual CALL_EXPR.
3512 The other arguments and return value are the same as for
3513 expand_builtin_strcpy. */
3516 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3518 return expand_movstr (dest, src, target, /*endp=*/0);
3521 /* Expand a call EXP to the stpcpy builtin.
3522 Return NULL_RTX if we failed the caller should emit a normal call,
3523 otherwise try to get the result in TARGET, if convenient (and in
3524 mode MODE if that's convenient). */
3527 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3530 location_t loc = EXPR_LOCATION (exp);
3532 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3535 dst = CALL_EXPR_ARG (exp, 0);
3536 src = CALL_EXPR_ARG (exp, 1);
3538 /* If return value is ignored, transform stpcpy into strcpy. */
3539 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3541 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3542 tree result = build_call_nofold (fn, 2, dst, src);
3543 return expand_expr (result, target, mode, EXPAND_NORMAL);
3550 /* Ensure we get an actual string whose length can be evaluated at
3551 compile-time, not an expression containing a string. This is
3552 because the latter will potentially produce pessimized code
3553 when used to produce the return value. */
3554 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3555 return expand_movstr (dst, src, target, /*endp=*/2);
3557 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3558 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3559 target, mode, /*endp=*/2);
3564 if (TREE_CODE (len) == INTEGER_CST)
3566 rtx len_rtx = expand_normal (len);
3568 if (CONST_INT_P (len_rtx))
3570 ret = expand_builtin_strcpy_args (dst, src, target);
3576 if (mode != VOIDmode)
3577 target = gen_reg_rtx (mode);
3579 target = gen_reg_rtx (GET_MODE (ret));
3581 if (GET_MODE (target) != GET_MODE (ret))
3582 ret = gen_lowpart (GET_MODE (target), ret);
3584 ret = plus_constant (ret, INTVAL (len_rtx));
3585 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3593 return expand_movstr (dst, src, target, /*endp=*/2);
3597 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3598 bytes from constant string DATA + OFFSET and return it as target
3602 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3603 enum machine_mode mode)
3605 const char *str = (const char *) data;
3607 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3610 return c_readstr (str + offset, mode);
3613 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3614 NULL_RTX if we failed the caller should emit a normal call. */
3617 expand_builtin_strncpy (tree exp, rtx target)
3619 location_t loc = EXPR_LOCATION (exp);
3621 if (validate_arglist (exp,
3622 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3624 tree dest = CALL_EXPR_ARG (exp, 0);
3625 tree src = CALL_EXPR_ARG (exp, 1);
3626 tree len = CALL_EXPR_ARG (exp, 2);
3627 tree slen = c_strlen (src, 1);
3629 /* We must be passed a constant len and src parameter. */
3630 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3633 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3635 /* We're required to pad with trailing zeros if the requested
3636 len is greater than strlen(s2)+1. In that case try to
3637 use store_by_pieces, if it fails, punt. */
3638 if (tree_int_cst_lt (slen, len))
3640 unsigned int dest_align
3641 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3642 const char *p = c_getstr (src);
3645 if (!p || dest_align == 0 || !host_integerp (len, 1)
3646 || !can_store_by_pieces (tree_low_cst (len, 1),
3647 builtin_strncpy_read_str,
3648 CONST_CAST (char *, p),
3652 dest_mem = get_memory_rtx (dest, len);
3653 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3654 builtin_strncpy_read_str,
3655 CONST_CAST (char *, p), dest_align, false, 0);
3656 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3657 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3664 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3665 bytes from constant string DATA + OFFSET and return it as target
3669 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3670 enum machine_mode mode)
3672 const char *c = (const char *) data;
3673 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3675 memset (p, *c, GET_MODE_SIZE (mode));
3677 return c_readstr (p, mode);
3680 /* Callback routine for store_by_pieces. Return the RTL of a register
3681 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3682 char value given in the RTL register data. For example, if mode is
3683 4 bytes wide, return the RTL for 0x01010101*data. */
3686 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3687 enum machine_mode mode)
3693 size = GET_MODE_SIZE (mode);
3697 p = XALLOCAVEC (char, size);
3698 memset (p, 1, size);
3699 coeff = c_readstr (p, mode);
3701 target = convert_to_mode (mode, (rtx) data, 1);
3702 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3703 return force_reg (mode, target);
3706 /* Expand expression EXP, which is a call to the memset builtin. Return
3707 NULL_RTX if we failed the caller should emit a normal call, otherwise
3708 try to get the result in TARGET, if convenient (and in mode MODE if that's
3712 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3714 if (!validate_arglist (exp,
3715 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3719 tree dest = CALL_EXPR_ARG (exp, 0);
3720 tree val = CALL_EXPR_ARG (exp, 1);
3721 tree len = CALL_EXPR_ARG (exp, 2);
3722 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3726 /* Helper function to do the actual work for expand_builtin_memset. The
3727 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3728 so that this can also be called without constructing an actual CALL_EXPR.
3729 The other arguments and return value are the same as for
3730 expand_builtin_memset. */
3733 expand_builtin_memset_args (tree dest, tree val, tree len,
3734 rtx target, enum machine_mode mode, tree orig_exp)
3737 enum built_in_function fcode;
3739 unsigned int dest_align;
3740 rtx dest_mem, dest_addr, len_rtx;
3741 HOST_WIDE_INT expected_size = -1;
3742 unsigned int expected_align = 0;
3744 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3746 /* If DEST is not a pointer type, don't do this operation in-line. */
3747 if (dest_align == 0)
3750 if (currently_expanding_gimple_stmt)
3751 stringop_block_profile (currently_expanding_gimple_stmt,
3752 &expected_align, &expected_size);
3754 if (expected_align < dest_align)
3755 expected_align = dest_align;
3757 /* If the LEN parameter is zero, return DEST. */
3758 if (integer_zerop (len))
3760 /* Evaluate and ignore VAL in case it has side-effects. */
3761 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3762 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3765 /* Stabilize the arguments in case we fail. */
3766 dest = builtin_save_expr (dest);
3767 val = builtin_save_expr (val);
3768 len = builtin_save_expr (len);
3770 len_rtx = expand_normal (len);
3771 dest_mem = get_memory_rtx (dest, len);
3773 if (TREE_CODE (val) != INTEGER_CST)
3777 val_rtx = expand_normal (val);
3778 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3781 /* Assume that we can memset by pieces if we can store
3782 * the coefficients by pieces (in the required modes).
3783 * We can't pass builtin_memset_gen_str as that emits RTL. */
3785 if (host_integerp (len, 1)
3786 && can_store_by_pieces (tree_low_cst (len, 1),
3787 builtin_memset_read_str, &c, dest_align,
3790 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3792 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3793 builtin_memset_gen_str, val_rtx, dest_align,
3796 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3797 dest_align, expected_align,
3801 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3802 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3806 if (target_char_cast (val, &c))
3811 if (host_integerp (len, 1)
3812 && can_store_by_pieces (tree_low_cst (len, 1),
3813 builtin_memset_read_str, &c, dest_align,
3815 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3816 builtin_memset_read_str, &c, dest_align, true, 0);
3817 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3818 dest_align, expected_align,
3822 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3823 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3827 set_mem_align (dest_mem, dest_align);
3828 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3829 CALL_EXPR_TAILCALL (orig_exp)
3830 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3831 expected_align, expected_size);
3835 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3836 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3842 fndecl = get_callee_fndecl (orig_exp);
3843 fcode = DECL_FUNCTION_CODE (fndecl);
3844 if (fcode == BUILT_IN_MEMSET)
3845 fn = build_call_nofold (fndecl, 3, dest, val, len);
3846 else if (fcode == BUILT_IN_BZERO)
3847 fn = build_call_nofold (fndecl, 2, dest, len);
3850 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3851 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3852 return expand_call (fn, target, target == const0_rtx);
3855 /* Expand expression EXP, which is a call to the bzero builtin. Return
3856 NULL_RTX if we failed the caller should emit a normal call. */
3859 expand_builtin_bzero (tree exp)
3862 location_t loc = EXPR_LOCATION (exp);
3864 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3867 dest = CALL_EXPR_ARG (exp, 0);
3868 size = CALL_EXPR_ARG (exp, 1);
3870 /* New argument list transforming bzero(ptr x, int y) to
3871 memset(ptr x, int 0, size_t y). This is done this way
3872 so that if it isn't expanded inline, we fallback to
3873 calling bzero instead of memset. */
3875 return expand_builtin_memset_args (dest, integer_zero_node,
3876 fold_convert_loc (loc, sizetype, size),
3877 const0_rtx, VOIDmode, exp);
3880 /* Expand expression EXP, which is a call to the memcmp built-in function.
3881 Return NULL_RTX if we failed and the
3882 caller should emit a normal call, otherwise try to get the result in
3883 TARGET, if convenient (and in mode MODE, if that's convenient). */
3886 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3887 ATTRIBUTE_UNUSED enum machine_mode mode)
3889 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3891 if (!validate_arglist (exp,
3892 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3895 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3897 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3900 tree arg1 = CALL_EXPR_ARG (exp, 0);
3901 tree arg2 = CALL_EXPR_ARG (exp, 1);
3902 tree len = CALL_EXPR_ARG (exp, 2);
3905 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3907 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3908 enum machine_mode insn_mode;
3910 #ifdef HAVE_cmpmemsi
3912 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3915 #ifdef HAVE_cmpstrnsi
3917 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3922 /* If we don't have POINTER_TYPE, call the function. */
3923 if (arg1_align == 0 || arg2_align == 0)
3926 /* Make a place to write the result of the instruction. */
3929 && REG_P (result) && GET_MODE (result) == insn_mode
3930 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3931 result = gen_reg_rtx (insn_mode);
3933 arg1_rtx = get_memory_rtx (arg1, len);
3934 arg2_rtx = get_memory_rtx (arg2, len);
3935 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3937 /* Set MEM_SIZE as appropriate. */
3938 if (CONST_INT_P (arg3_rtx))
3940 set_mem_size (arg1_rtx, arg3_rtx);
3941 set_mem_size (arg2_rtx, arg3_rtx);
3944 #ifdef HAVE_cmpmemsi
3946 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3947 GEN_INT (MIN (arg1_align, arg2_align)));
3950 #ifdef HAVE_cmpstrnsi
3952 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3953 GEN_INT (MIN (arg1_align, arg2_align)));
3961 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3962 TYPE_MODE (integer_type_node), 3,
3963 XEXP (arg1_rtx, 0), Pmode,
3964 XEXP (arg2_rtx, 0), Pmode,
3965 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3966 TYPE_UNSIGNED (sizetype)),
3967 TYPE_MODE (sizetype));
3969 /* Return the value in the proper mode for this function. */
3970 mode = TYPE_MODE (TREE_TYPE (exp));
3971 if (GET_MODE (result) == mode)
3973 else if (target != 0)
3975 convert_move (target, result, 0);
3979 return convert_to_mode (mode, result, 0);
3986 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3987 if we failed the caller should emit a normal call, otherwise try to get
3988 the result in TARGET, if convenient. */
3991 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3993 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3996 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3997 if (cmpstr_optab[SImode] != CODE_FOR_nothing
3998 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4000 rtx arg1_rtx, arg2_rtx;
4001 rtx result, insn = NULL_RTX;
4003 tree arg1 = CALL_EXPR_ARG (exp, 0);
4004 tree arg2 = CALL_EXPR_ARG (exp, 1);
4007 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4009 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4011 /* If we don't have POINTER_TYPE, call the function. */
4012 if (arg1_align == 0 || arg2_align == 0)
4015 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4016 arg1 = builtin_save_expr (arg1);
4017 arg2 = builtin_save_expr (arg2);
4019 arg1_rtx = get_memory_rtx (arg1, NULL);
4020 arg2_rtx = get_memory_rtx (arg2, NULL);
4022 #ifdef HAVE_cmpstrsi
4023 /* Try to call cmpstrsi. */
4026 enum machine_mode insn_mode
4027 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4029 /* Make a place to write the result of the instruction. */
4032 && REG_P (result) && GET_MODE (result) == insn_mode
4033 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4034 result = gen_reg_rtx (insn_mode);
4036 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4037 GEN_INT (MIN (arg1_align, arg2_align)));
4040 #ifdef HAVE_cmpstrnsi
4041 /* Try to determine at least one length and call cmpstrnsi. */
4042 if (!insn && HAVE_cmpstrnsi)
4047 enum machine_mode insn_mode
4048 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4049 tree len1 = c_strlen (arg1, 1);
4050 tree len2 = c_strlen (arg2, 1);
4053 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4055 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4057 /* If we don't have a constant length for the first, use the length
4058 of the second, if we know it. We don't require a constant for
4059 this case; some cost analysis could be done if both are available
4060 but neither is constant. For now, assume they're equally cheap,
4061 unless one has side effects. If both strings have constant lengths,
4068 else if (TREE_SIDE_EFFECTS (len1))
4070 else if (TREE_SIDE_EFFECTS (len2))
4072 else if (TREE_CODE (len1) != INTEGER_CST)
4074 else if (TREE_CODE (len2) != INTEGER_CST)
4076 else if (tree_int_cst_lt (len1, len2))
4081 /* If both arguments have side effects, we cannot optimize. */
4082 if (!len || TREE_SIDE_EFFECTS (len))
4085 arg3_rtx = expand_normal (len);
4087 /* Make a place to write the result of the instruction. */
4090 && REG_P (result) && GET_MODE (result) == insn_mode
4091 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4092 result = gen_reg_rtx (insn_mode);
4094 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4095 GEN_INT (MIN (arg1_align, arg2_align)));
4101 enum machine_mode mode;
4104 /* Return the value in the proper mode for this function. */
4105 mode = TYPE_MODE (TREE_TYPE (exp));
4106 if (GET_MODE (result) == mode)
4109 return convert_to_mode (mode, result, 0);
4110 convert_move (target, result, 0);
4114 /* Expand the library call ourselves using a stabilized argument
4115 list to avoid re-evaluating the function's arguments twice. */
4116 #ifdef HAVE_cmpstrnsi
4119 fndecl = get_callee_fndecl (exp);
4120 fn = build_call_nofold (fndecl, 2, arg1, arg2);
4121 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4122 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4123 return expand_call (fn, target, target == const0_rtx);
4129 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4130 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4131 the result in TARGET, if convenient. */
4134 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4135 ATTRIBUTE_UNUSED enum machine_mode mode)
4137 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4139 if (!validate_arglist (exp,
4140 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4143 /* If c_strlen can determine an expression for one of the string
4144 lengths, and it doesn't have side effects, then emit cmpstrnsi
4145 using length MIN(strlen(string)+1, arg3). */
4146 #ifdef HAVE_cmpstrnsi
4149 tree len, len1, len2;
4150 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4153 tree arg1 = CALL_EXPR_ARG (exp, 0);
4154 tree arg2 = CALL_EXPR_ARG (exp, 1);
4155 tree arg3 = CALL_EXPR_ARG (exp, 2);
4158 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4160 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4161 enum machine_mode insn_mode
4162 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4164 len1 = c_strlen (arg1, 1);
4165 len2 = c_strlen (arg2, 1);
4168 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4170 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4172 /* If we don't have a constant length for the first, use the length
4173 of the second, if we know it. We don't require a constant for
4174 this case; some cost analysis could be done if both are available
4175 but neither is constant. For now, assume they're equally cheap,
4176 unless one has side effects. If both strings have constant lengths,
4183 else if (TREE_SIDE_EFFECTS (len1))
4185 else if (TREE_SIDE_EFFECTS (len2))
4187 else if (TREE_CODE (len1) != INTEGER_CST)
4189 else if (TREE_CODE (len2) != INTEGER_CST)
4191 else if (tree_int_cst_lt (len1, len2))
4196 /* If both arguments have side effects, we cannot optimize. */
4197 if (!len || TREE_SIDE_EFFECTS (len))
4200 /* The actual new length parameter is MIN(len,arg3). */
4201 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4202 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4204 /* If we don't have POINTER_TYPE, call the function. */
4205 if (arg1_align == 0 || arg2_align == 0)
4208 /* Make a place to write the result of the instruction. */
4211 && REG_P (result) && GET_MODE (result) == insn_mode
4212 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4213 result = gen_reg_rtx (insn_mode);
4215 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4216 arg1 = builtin_save_expr (arg1);
4217 arg2 = builtin_save_expr (arg2);
4218 len = builtin_save_expr (len);
4220 arg1_rtx = get_memory_rtx (arg1, len);
4221 arg2_rtx = get_memory_rtx (arg2, len);
4222 arg3_rtx = expand_normal (len);
4223 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4224 GEN_INT (MIN (arg1_align, arg2_align)));
4229 /* Return the value in the proper mode for this function. */
4230 mode = TYPE_MODE (TREE_TYPE (exp));
4231 if (GET_MODE (result) == mode)
4234 return convert_to_mode (mode, result, 0);
4235 convert_move (target, result, 0);
4239 /* Expand the library call ourselves using a stabilized argument
4240 list to avoid re-evaluating the function's arguments twice. */
4241 fndecl = get_callee_fndecl (exp);
4242 fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
4243 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4244 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4245 return expand_call (fn, target, target == const0_rtx);
4251 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4252 if that's convenient. */
4255 expand_builtin_saveregs (void)
4259 /* Don't do __builtin_saveregs more than once in a function.
4260 Save the result of the first call and reuse it. */
4261 if (saveregs_value != 0)
4262 return saveregs_value;
4264 /* When this function is called, it means that registers must be
4265 saved on entry to this function. So we migrate the call to the
4266 first insn of this function. */
4270 /* Do whatever the machine needs done in this case. */
4271 val = targetm.calls.expand_builtin_saveregs ();
4276 saveregs_value = val;
4278 /* Put the insns after the NOTE that starts the function. If this
4279 is inside a start_sequence, make the outer-level insn chain current, so
4280 the code is placed at the start of the function. */
4281 push_topmost_sequence ();
4282 emit_insn_after (seq, entry_of_function ());
4283 pop_topmost_sequence ();
4288 /* __builtin_args_info (N) returns word N of the arg space info
4289 for the current function. The number and meanings of words
4290 is controlled by the definition of CUMULATIVE_ARGS. */
4293 expand_builtin_args_info (tree exp)
4295 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4296 int *word_ptr = (int *) &crtl->args.info;
4298 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4300 if (call_expr_nargs (exp) != 0)
4302 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4303 error ("argument of %<__builtin_args_info%> must be constant");
4306 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4308 if (wordnum < 0 || wordnum >= nwords)
4309 error ("argument of %<__builtin_args_info%> out of range");
4311 return GEN_INT (word_ptr[wordnum]);
4315 error ("missing argument in %<__builtin_args_info%>");
4320 /* Expand a call to __builtin_next_arg. */
4323 expand_builtin_next_arg (void)
4325 /* Checking arguments is already done in fold_builtin_next_arg
4326 that must be called before this function. */
4327 return expand_binop (ptr_mode, add_optab,
4328 crtl->args.internal_arg_pointer,
4329 crtl->args.arg_offset_rtx,
4330 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4333 /* Make it easier for the backends by protecting the valist argument
4334 from multiple evaluations. */
4337 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4339 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4341 gcc_assert (vatype != NULL_TREE);
4343 if (TREE_CODE (vatype) == ARRAY_TYPE)
4345 if (TREE_SIDE_EFFECTS (valist))
4346 valist = save_expr (valist);
4348 /* For this case, the backends will be expecting a pointer to
4349 vatype, but it's possible we've actually been given an array
4350 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4352 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4354 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4355 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4364 if (! TREE_SIDE_EFFECTS (valist))
4367 pt = build_pointer_type (vatype);
4368 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4369 TREE_SIDE_EFFECTS (valist) = 1;
4372 if (TREE_SIDE_EFFECTS (valist))
4373 valist = save_expr (valist);
4374 valist = build_fold_indirect_ref_loc (loc, valist);
4380 /* The "standard" definition of va_list is void*. */
4383 std_build_builtin_va_list (void)
4385 return ptr_type_node;
4388 /* The "standard" abi va_list is va_list_type_node. */
4391 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4393 return va_list_type_node;
4396 /* The "standard" type of va_list is va_list_type_node. */
4399 std_canonical_va_list_type (tree type)
4403 if (INDIRECT_REF_P (type))
4404 type = TREE_TYPE (type);
4405 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4406 type = TREE_TYPE (type);
4407 wtype = va_list_type_node;
4409 /* Treat structure va_list types. */
4410 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4411 htype = TREE_TYPE (htype);
4412 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4414 /* If va_list is an array type, the argument may have decayed
4415 to a pointer type, e.g. by being passed to another function.
4416 In that case, unwrap both types so that we can compare the
4417 underlying records. */
4418 if (TREE_CODE (htype) == ARRAY_TYPE
4419 || POINTER_TYPE_P (htype))
4421 wtype = TREE_TYPE (wtype);
4422 htype = TREE_TYPE (htype);
4425 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4426 return va_list_type_node;
4431 /* The "standard" implementation of va_start: just assign `nextarg' to
4435 std_expand_builtin_va_start (tree valist, rtx nextarg)
4437 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4438 convert_move (va_r, nextarg, 0);
4441 /* Expand EXP, a call to __builtin_va_start. */
4444 expand_builtin_va_start (tree exp)
4448 location_t loc = EXPR_LOCATION (exp);
4450 if (call_expr_nargs (exp) < 2)
4452 error_at (loc, "too few arguments to function %<va_start%>");
4456 if (fold_builtin_next_arg (exp, true))
4459 nextarg = expand_builtin_next_arg ();
4460 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4462 if (targetm.expand_builtin_va_start)
4463 targetm.expand_builtin_va_start (valist, nextarg);
4465 std_expand_builtin_va_start (valist, nextarg);
4470 /* The "standard" implementation of va_arg: read the value from the
4471 current (padded) address and increment by the (padded) size. */
4474 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4477 tree addr, t, type_size, rounded_size, valist_tmp;
4478 unsigned HOST_WIDE_INT align, boundary;
4481 #ifdef ARGS_GROW_DOWNWARD
4482 /* All of the alignment and movement below is for args-grow-up machines.
4483 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4484 implement their own specialized gimplify_va_arg_expr routines. */
4488 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4490 type = build_pointer_type (type);
4492 align = PARM_BOUNDARY / BITS_PER_UNIT;
4493 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4495 /* When we align parameter on stack for caller, if the parameter
4496 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4497 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4498 here with caller. */
4499 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4500 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4502 boundary /= BITS_PER_UNIT;
4504 /* Hoist the valist value into a temporary for the moment. */
4505 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4507 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4508 requires greater alignment, we must perform dynamic alignment. */
4509 if (boundary > align
4510 && !integer_zerop (TYPE_SIZE (type)))
4512 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4513 fold_build2 (POINTER_PLUS_EXPR,
4515 valist_tmp, size_int (boundary - 1)));
4516 gimplify_and_add (t, pre_p);
4518 t = fold_convert (sizetype, valist_tmp);
4519 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4520 fold_convert (TREE_TYPE (valist),
4521 fold_build2 (BIT_AND_EXPR, sizetype, t,
4522 size_int (-boundary))));
4523 gimplify_and_add (t, pre_p);
4528 /* If the actual alignment is less than the alignment of the type,
4529 adjust the type accordingly so that we don't assume strict alignment
4530 when dereferencing the pointer. */
4531 boundary *= BITS_PER_UNIT;
4532 if (boundary < TYPE_ALIGN (type))
4534 type = build_variant_type_copy (type);
4535 TYPE_ALIGN (type) = boundary;
4538 /* Compute the rounded size of the type. */
4539 type_size = size_in_bytes (type);
4540 rounded_size = round_up (type_size, align);
4542 /* Reduce rounded_size so it's sharable with the postqueue. */
4543 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4547 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4549 /* Small args are padded downward. */
4550 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4551 rounded_size, size_int (align));
4552 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4553 size_binop (MINUS_EXPR, rounded_size, type_size));
4554 addr = fold_build2 (POINTER_PLUS_EXPR,
4555 TREE_TYPE (addr), addr, t);
4558 /* Compute new value for AP. */
4559 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4560 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4561 gimplify_and_add (t, pre_p);
4563 addr = fold_convert (build_pointer_type (type), addr);
4566 addr = build_va_arg_indirect_ref (addr);
4568 return build_va_arg_indirect_ref (addr);
4571 /* Build an indirect-ref expression over the given TREE, which represents a
4572 piece of a va_arg() expansion. */
4574 build_va_arg_indirect_ref (tree addr)
4576 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4578 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4584 /* Return a dummy expression of type TYPE in order to keep going after an
4588 dummy_object (tree type)
4590 tree t = build_int_cst (build_pointer_type (type), 0);
4591 return build1 (INDIRECT_REF, type, t);
4594 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4595 builtin function, but a very special sort of operator. */
4597 enum gimplify_status
4598 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4600 tree promoted_type, have_va_type;
4601 tree valist = TREE_OPERAND (*expr_p, 0);
4602 tree type = TREE_TYPE (*expr_p);
4604 location_t loc = EXPR_LOCATION (*expr_p);
4606 /* Verify that valist is of the proper type. */
4607 have_va_type = TREE_TYPE (valist);
4608 if (have_va_type == error_mark_node)
4610 have_va_type = targetm.canonical_va_list_type (have_va_type);
4612 if (have_va_type == NULL_TREE)
4614 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4618 /* Generate a diagnostic for requesting data of a type that cannot
4619 be passed through `...' due to type promotion at the call site. */
4620 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4623 static bool gave_help;
4626 /* Unfortunately, this is merely undefined, rather than a constraint
4627 violation, so we cannot make this an error. If this call is never
4628 executed, the program is still strictly conforming. */
4629 warned = warning_at (loc, 0,
4630 "%qT is promoted to %qT when passed through %<...%>",
4631 type, promoted_type);
4632 if (!gave_help && warned)
4635 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4636 promoted_type, type);
4639 /* We can, however, treat "undefined" any way we please.
4640 Call abort to encourage the user to fix the program. */
4642 inform (loc, "if this code is reached, the program will abort");
4643 /* Before the abort, allow the evaluation of the va_list
4644 expression to exit or longjmp. */
4645 gimplify_and_add (valist, pre_p);
4646 t = build_call_expr_loc (loc,
4647 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4648 gimplify_and_add (t, pre_p);
4650 /* This is dead code, but go ahead and finish so that the
4651 mode of the result comes out right. */
4652 *expr_p = dummy_object (type);
4657 /* Make it easier for the backends by protecting the valist argument
4658 from multiple evaluations. */
4659 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4661 /* For this case, the backends will be expecting a pointer to
4662 TREE_TYPE (abi), but it's possible we've
4663 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4665 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4667 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4668 valist = fold_convert_loc (loc, p1,
4669 build_fold_addr_expr_loc (loc, valist));
4672 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4675 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4677 if (!targetm.gimplify_va_arg_expr)
4678 /* FIXME: Once most targets are converted we should merely
4679 assert this is non-null. */
4682 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4687 /* Expand EXP, a call to __builtin_va_end. */
4690 expand_builtin_va_end (tree exp)
4692 tree valist = CALL_EXPR_ARG (exp, 0);
4694 /* Evaluate for side effects, if needed. I hate macros that don't
4696 if (TREE_SIDE_EFFECTS (valist))
4697 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4702 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4703 builtin rather than just as an assignment in stdarg.h because of the
4704 nastiness of array-type va_list types. */
4707 expand_builtin_va_copy (tree exp)
4710 location_t loc = EXPR_LOCATION (exp);
4712 dst = CALL_EXPR_ARG (exp, 0);
4713 src = CALL_EXPR_ARG (exp, 1);
4715 dst = stabilize_va_list_loc (loc, dst, 1);
4716 src = stabilize_va_list_loc (loc, src, 0);
4718 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4720 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4722 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4723 TREE_SIDE_EFFECTS (t) = 1;
4724 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4728 rtx dstb, srcb, size;
4730 /* Evaluate to pointers. */
4731 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4732 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4733 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4734 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4736 dstb = convert_memory_address (Pmode, dstb);
4737 srcb = convert_memory_address (Pmode, srcb);
4739 /* "Dereference" to BLKmode memories. */
4740 dstb = gen_rtx_MEM (BLKmode, dstb);
4741 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4742 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4743 srcb = gen_rtx_MEM (BLKmode, srcb);
4744 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4745 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4748 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4754 /* Expand a call to one of the builtin functions __builtin_frame_address or
4755 __builtin_return_address. */
4758 expand_builtin_frame_address (tree fndecl, tree exp)
4760 /* The argument must be a nonnegative integer constant.
4761 It counts the number of frames to scan up the stack.
4762 The value is the return address saved in that frame. */
4763 if (call_expr_nargs (exp) == 0)
4764 /* Warning about missing arg was already issued. */
4766 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4768 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4769 error ("invalid argument to %<__builtin_frame_address%>");
4771 error ("invalid argument to %<__builtin_return_address%>");
4777 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4778 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4780 /* Some ports cannot access arbitrary stack frames. */
4783 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4784 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4786 warning (0, "unsupported argument to %<__builtin_return_address%>");
4790 /* For __builtin_frame_address, return what we've got. */
4791 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4795 && ! CONSTANT_P (tem))
4796 tem = copy_to_mode_reg (Pmode, tem);
4801 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4802 we failed and the caller should emit a normal call, otherwise try to get
4803 the result in TARGET, if convenient. */
4806 expand_builtin_alloca (tree exp, rtx target)
4811 /* Emit normal call if marked not-inlineable. */
4812 if (CALL_CANNOT_INLINE_P (exp))
4815 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4818 /* Compute the argument. */
4819 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4821 /* Allocate the desired space. */
4822 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4823 result = convert_memory_address (ptr_mode, result);
4828 /* Expand a call to a bswap builtin with argument ARG0. MODE
4829 is the mode to expand with. */
4832 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4834 enum machine_mode mode;
4838 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4841 arg = CALL_EXPR_ARG (exp, 0);
4842 mode = TYPE_MODE (TREE_TYPE (arg));
4843 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4845 target = expand_unop (mode, bswap_optab, op0, target, 1);
4847 gcc_assert (target);
4849 return convert_to_mode (mode, target, 0);
4852 /* Expand a call to a unary builtin in EXP.
4853 Return NULL_RTX if a normal call should be emitted rather than expanding the
4854 function in-line. If convenient, the result should be placed in TARGET.
4855 SUBTARGET may be used as the target for computing one of EXP's operands. */
4858 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4859 rtx subtarget, optab op_optab)
4863 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4866 /* Compute the argument. */
4867 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4868 VOIDmode, EXPAND_NORMAL);
4869 /* Compute op, into TARGET if possible.
4870 Set TARGET to wherever the result comes back. */
4871 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4872 op_optab, op0, target, 1);
4873 gcc_assert (target);
4875 return convert_to_mode (target_mode, target, 0);
4878 /* Expand a call to __builtin_expect. We just return our argument
4879 as the builtin_expect semantic should've been already executed by
4880 tree branch prediction pass. */
4883 expand_builtin_expect (tree exp, rtx target)
4887 if (call_expr_nargs (exp) < 2)
4889 arg = CALL_EXPR_ARG (exp, 0);
4891 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4892 /* When guessing was done, the hints should be already stripped away. */
4893 gcc_assert (!flag_guess_branch_prob
4894 || optimize == 0 || errorcount || sorrycount);
4899 expand_builtin_trap (void)
4903 emit_insn (gen_trap ());
4906 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4910 /* Expand a call to __builtin_unreachable. We do nothing except emit
4911 a barrier saying that control flow will not pass here.
4913 It is the responsibility of the program being compiled to ensure
4914 that control flow does never reach __builtin_unreachable. */
4916 expand_builtin_unreachable (void)
4921 /* Expand EXP, a call to fabs, fabsf or fabsl.
4922 Return NULL_RTX if a normal call should be emitted rather than expanding
4923 the function inline. If convenient, the result should be placed
4924 in TARGET. SUBTARGET may be used as the target for computing
4928 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4930 enum machine_mode mode;
4934 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4937 arg = CALL_EXPR_ARG (exp, 0);
4938 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4939 mode = TYPE_MODE (TREE_TYPE (arg));
4940 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4941 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4944 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4945 Return NULL is a normal call should be emitted rather than expanding the
4946 function inline. If convenient, the result should be placed in TARGET.
4947 SUBTARGET may be used as the target for computing the operand. */
4950 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4955 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4958 arg = CALL_EXPR_ARG (exp, 0);
4959 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4961 arg = CALL_EXPR_ARG (exp, 1);
4962 op1 = expand_normal (arg);
4964 return expand_copysign (op0, op1, target);
4967 /* Create a new constant string literal and return a char* pointer to it.
4968 The STRING_CST value is the LEN characters at STR. */
4970 build_string_literal (int len, const char *str)
4972 tree t, elem, index, type;
4974 t = build_string (len, str);
4975 elem = build_type_variant (char_type_node, 1, 0);
4976 index = build_index_type (size_int (len - 1));
4977 type = build_array_type (elem, index);
4978 TREE_TYPE (t) = type;
4979 TREE_CONSTANT (t) = 1;
4980 TREE_READONLY (t) = 1;
4981 TREE_STATIC (t) = 1;
4983 type = build_pointer_type (elem);
4984 t = build1 (ADDR_EXPR, type,
4985 build4 (ARRAY_REF, elem,
4986 t, integer_zero_node, NULL_TREE, NULL_TREE));
4990 /* Expand a call to either the entry or exit function profiler. */
4993 expand_builtin_profile_func (bool exitp)
4995 rtx this_rtx, which;
4997 this_rtx = DECL_RTL (current_function_decl);
4998 gcc_assert (MEM_P (this_rtx));
4999 this_rtx = XEXP (this_rtx, 0);
5002 which = profile_function_exit_libfunc;
5004 which = profile_function_entry_libfunc;
5006 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5007 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5014 /* Expand a call to __builtin___clear_cache. */
5017 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5019 #ifndef HAVE_clear_cache
5020 #ifdef CLEAR_INSN_CACHE
5021 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5022 does something. Just do the default expansion to a call to
5026 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5027 does nothing. There is no need to call it. Do nothing. */
5029 #endif /* CLEAR_INSN_CACHE */
5031 /* We have a "clear_cache" insn, and it will handle everything. */
5033 rtx begin_rtx, end_rtx;
5034 enum insn_code icode;
5036 /* We must not expand to a library call. If we did, any
5037 fallback library function in libgcc that might contain a call to
5038 __builtin___clear_cache() would recurse infinitely. */
5039 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5041 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5045 if (HAVE_clear_cache)
5047 icode = CODE_FOR_clear_cache;
5049 begin = CALL_EXPR_ARG (exp, 0);
5050 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5051 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5052 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5053 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5055 end = CALL_EXPR_ARG (exp, 1);
5056 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5057 end_rtx = convert_memory_address (Pmode, end_rtx);
5058 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5059 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5061 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5064 #endif /* HAVE_clear_cache */
5067 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5070 round_trampoline_addr (rtx tramp)
5072 rtx temp, addend, mask;
5074 /* If we don't need too much alignment, we'll have been guaranteed
5075 proper alignment by get_trampoline_type. */
5076 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5079 /* Round address up to desired boundary. */
5080 temp = gen_reg_rtx (Pmode);
5081 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5082 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5084 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5085 temp, 0, OPTAB_LIB_WIDEN);
5086 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5087 temp, 0, OPTAB_LIB_WIDEN);
5093 expand_builtin_init_trampoline (tree exp)
5095 tree t_tramp, t_func, t_chain;
5096 rtx m_tramp, r_tramp, r_chain, tmp;
5098 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5099 POINTER_TYPE, VOID_TYPE))
5102 t_tramp = CALL_EXPR_ARG (exp, 0);
5103 t_func = CALL_EXPR_ARG (exp, 1);
5104 t_chain = CALL_EXPR_ARG (exp, 2);
5106 r_tramp = expand_normal (t_tramp);
5107 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5108 MEM_NOTRAP_P (m_tramp) = 1;
5110 /* The TRAMP argument should be the address of a field within the
5111 local function's FRAME decl. Let's see if we can fill in the
5112 to fill in the MEM_ATTRs for this memory. */
5113 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5114 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
5117 tmp = round_trampoline_addr (r_tramp);
5120 m_tramp = change_address (m_tramp, BLKmode, tmp);
5121 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5122 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5125 /* The FUNC argument should be the address of the nested function.
5126 Extract the actual function decl to pass to the hook. */
5127 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5128 t_func = TREE_OPERAND (t_func, 0);
5129 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5131 r_chain = expand_normal (t_chain);
5133 /* Generate insns to initialize the trampoline. */
5134 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5136 trampolines_created = 1;
5141 expand_builtin_adjust_trampoline (tree exp)
5145 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5148 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5149 tramp = round_trampoline_addr (tramp);
5150 if (targetm.calls.trampoline_adjust_address)
5151 tramp = targetm.calls.trampoline_adjust_address (tramp);
5156 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5157 function. The function first checks whether the back end provides
5158 an insn to implement signbit for the respective mode. If not, it
5159 checks whether the floating point format of the value is such that
5160 the sign bit can be extracted. If that is not the case, the
5161 function returns NULL_RTX to indicate that a normal call should be
5162 emitted rather than expanding the function in-line. EXP is the
5163 expression that is a call to the builtin function; if convenient,
5164 the result should be placed in TARGET. */
5166 expand_builtin_signbit (tree exp, rtx target)
5168 const struct real_format *fmt;
5169 enum machine_mode fmode, imode, rmode;
5170 HOST_WIDE_INT hi, lo;
5173 enum insn_code icode;
5175 location_t loc = EXPR_LOCATION (exp);
5177 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5180 arg = CALL_EXPR_ARG (exp, 0);
5181 fmode = TYPE_MODE (TREE_TYPE (arg));
5182 rmode = TYPE_MODE (TREE_TYPE (exp));
5183 fmt = REAL_MODE_FORMAT (fmode);
5185 arg = builtin_save_expr (arg);
5187 /* Expand the argument yielding a RTX expression. */
5188 temp = expand_normal (arg);
5190 /* Check if the back end provides an insn that handles signbit for the
5192 icode = signbit_optab->handlers [(int) fmode].insn_code;
5193 if (icode != CODE_FOR_nothing)
5195 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5196 emit_unop_insn (icode, target, temp, UNKNOWN);
5200 /* For floating point formats without a sign bit, implement signbit
5202 bitpos = fmt->signbit_ro;
5205 /* But we can't do this if the format supports signed zero. */
5206 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5209 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5210 build_real (TREE_TYPE (arg), dconst0));
5211 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5214 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5216 imode = int_mode_for_mode (fmode);
5217 if (imode == BLKmode)
5219 temp = gen_lowpart (imode, temp);
5224 /* Handle targets with different FP word orders. */
5225 if (FLOAT_WORDS_BIG_ENDIAN)
5226 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5228 word = bitpos / BITS_PER_WORD;
5229 temp = operand_subword_force (temp, word, fmode);
5230 bitpos = bitpos % BITS_PER_WORD;
5233 /* Force the intermediate word_mode (or narrower) result into a
5234 register. This avoids attempting to create paradoxical SUBREGs
5235 of floating point modes below. */
5236 temp = force_reg (imode, temp);
5238 /* If the bitpos is within the "result mode" lowpart, the operation
5239 can be implement with a single bitwise AND. Otherwise, we need
5240 a right shift and an AND. */
5242 if (bitpos < GET_MODE_BITSIZE (rmode))
5244 if (bitpos < HOST_BITS_PER_WIDE_INT)
5247 lo = (HOST_WIDE_INT) 1 << bitpos;
5251 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5255 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5256 temp = gen_lowpart (rmode, temp);
5257 temp = expand_binop (rmode, and_optab, temp,
5258 immed_double_const (lo, hi, rmode),
5259 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5263 /* Perform a logical right shift to place the signbit in the least
5264 significant bit, then truncate the result to the desired mode
5265 and mask just this bit. */
5266 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5267 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5268 temp = gen_lowpart (rmode, temp);
5269 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5270 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5276 /* Expand fork or exec calls. TARGET is the desired target of the
5277 call. EXP is the call. FN is the
5278 identificator of the actual function. IGNORE is nonzero if the
5279 value is to be ignored. */
5282 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5287 /* If we are not profiling, just call the function. */
5288 if (!profile_arc_flag)
5291 /* Otherwise call the wrapper. This should be equivalent for the rest of
5292 compiler, so the code does not diverge, and the wrapper may run the
5293 code necessary for keeping the profiling sane. */
5295 switch (DECL_FUNCTION_CODE (fn))
5298 id = get_identifier ("__gcov_fork");
5301 case BUILT_IN_EXECL:
5302 id = get_identifier ("__gcov_execl");
5305 case BUILT_IN_EXECV:
5306 id = get_identifier ("__gcov_execv");
5309 case BUILT_IN_EXECLP:
5310 id = get_identifier ("__gcov_execlp");
5313 case BUILT_IN_EXECLE:
5314 id = get_identifier ("__gcov_execle");
5317 case BUILT_IN_EXECVP:
5318 id = get_identifier ("__gcov_execvp");
5321 case BUILT_IN_EXECVE:
5322 id = get_identifier ("__gcov_execve");
5329 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5330 FUNCTION_DECL, id, TREE_TYPE (fn));
5331 DECL_EXTERNAL (decl) = 1;
5332 TREE_PUBLIC (decl) = 1;
5333 DECL_ARTIFICIAL (decl) = 1;
5334 TREE_NOTHROW (decl) = 1;
5335 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5336 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5337 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5338 return expand_call (call, target, ignore);
5343 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5344 the pointer in these functions is void*, the tree optimizers may remove
5345 casts. The mode computed in expand_builtin isn't reliable either, due
5346 to __sync_bool_compare_and_swap.
5348 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5349 group of builtins. This gives us log2 of the mode size. */
5351 static inline enum machine_mode
5352 get_builtin_sync_mode (int fcode_diff)
5354 /* The size is not negotiable, so ask not to get BLKmode in return
5355 if the target indicates that a smaller size would be better. */
5356 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5359 /* Expand the memory expression LOC and return the appropriate memory operand
5360 for the builtin_sync operations. */
5363 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5367 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5368 addr = convert_memory_address (Pmode, addr);
5370 /* Note that we explicitly do not want any alias information for this
5371 memory, so that we kill all other live memories. Otherwise we don't
5372 satisfy the full barrier semantics of the intrinsic. */
5373 mem = validize_mem (gen_rtx_MEM (mode, addr));
5375 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5376 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5377 MEM_VOLATILE_P (mem) = 1;
5382 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5383 EXP is the CALL_EXPR. CODE is the rtx code
5384 that corresponds to the arithmetic or logical operation from the name;
5385 an exception here is that NOT actually means NAND. TARGET is an optional
5386 place for us to store the results; AFTER is true if this is the
5387 fetch_and_xxx form. IGNORE is true if we don't actually care about
5388 the result of the operation at all. */
5391 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5392 enum rtx_code code, bool after,
5393 rtx target, bool ignore)
5396 enum machine_mode old_mode;
5397 location_t loc = EXPR_LOCATION (exp);
5399 if (code == NOT && warn_sync_nand)
5401 tree fndecl = get_callee_fndecl (exp);
5402 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5404 static bool warned_f_a_n, warned_n_a_f;
5408 case BUILT_IN_FETCH_AND_NAND_1:
5409 case BUILT_IN_FETCH_AND_NAND_2:
5410 case BUILT_IN_FETCH_AND_NAND_4:
5411 case BUILT_IN_FETCH_AND_NAND_8:
5412 case BUILT_IN_FETCH_AND_NAND_16:
5417 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5418 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5419 warned_f_a_n = true;
5422 case BUILT_IN_NAND_AND_FETCH_1:
5423 case BUILT_IN_NAND_AND_FETCH_2:
5424 case BUILT_IN_NAND_AND_FETCH_4:
5425 case BUILT_IN_NAND_AND_FETCH_8:
5426 case BUILT_IN_NAND_AND_FETCH_16:
5431 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5432 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5433 warned_n_a_f = true;
5441 /* Expand the operands. */
5442 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5444 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5445 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5446 of CONST_INTs, where we know the old_mode only from the call argument. */
5447 old_mode = GET_MODE (val);
5448 if (old_mode == VOIDmode)
5449 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5450 val = convert_modes (mode, old_mode, val, 1);
5453 return expand_sync_operation (mem, val, code);
5455 return expand_sync_fetch_operation (mem, val, code, after, target);
5458 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5459 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5460 true if this is the boolean form. TARGET is a place for us to store the
5461 results; this is NOT optional if IS_BOOL is true. */
5464 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5465 bool is_bool, rtx target)
5467 rtx old_val, new_val, mem;
5468 enum machine_mode old_mode;
5470 /* Expand the operands. */
5471 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5474 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5475 mode, EXPAND_NORMAL);
5476 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5477 of CONST_INTs, where we know the old_mode only from the call argument. */
5478 old_mode = GET_MODE (old_val);
5479 if (old_mode == VOIDmode)
5480 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5481 old_val = convert_modes (mode, old_mode, old_val, 1);
5483 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5484 mode, EXPAND_NORMAL);
5485 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5486 of CONST_INTs, where we know the old_mode only from the call argument. */
5487 old_mode = GET_MODE (new_val);
5488 if (old_mode == VOIDmode)
5489 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5490 new_val = convert_modes (mode, old_mode, new_val, 1);
5493 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5495 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5498 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5499 general form is actually an atomic exchange, and some targets only
5500 support a reduced form with the second argument being a constant 1.
5501 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5505 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5509 enum machine_mode old_mode;
5511 /* Expand the operands. */
5512 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5513 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5514 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5515 of CONST_INTs, where we know the old_mode only from the call argument. */
5516 old_mode = GET_MODE (val);
5517 if (old_mode == VOIDmode)
5518 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5519 val = convert_modes (mode, old_mode, val, 1);
5521 return expand_sync_lock_test_and_set (mem, val, target);
5524 /* Expand the __sync_synchronize intrinsic. */
5527 expand_builtin_synchronize (void)
5530 VEC (tree, gc) *v_clobbers;
5532 #ifdef HAVE_memory_barrier
5533 if (HAVE_memory_barrier)
5535 emit_insn (gen_memory_barrier ());
5540 if (synchronize_libfunc != NULL_RTX)
5542 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5546 /* If no explicit memory barrier instruction is available, create an
5547 empty asm stmt with a memory clobber. */
5548 v_clobbers = VEC_alloc (tree, gc, 1);
5549 VEC_quick_push (tree, v_clobbers,
5550 tree_cons (NULL, build_string (6, "memory"), NULL));
5551 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5552 gimple_asm_set_volatile (x, true);
5553 expand_asm_stmt (x);
5556 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5559 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5561 enum insn_code icode;
5563 rtx val = const0_rtx;
5565 /* Expand the operands. */
5566 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5568 /* If there is an explicit operation in the md file, use it. */
5569 icode = sync_lock_release[mode];
5570 if (icode != CODE_FOR_nothing)
5572 if (!insn_data[icode].operand[1].predicate (val, mode))
5573 val = force_reg (mode, val);
5575 insn = GEN_FCN (icode) (mem, val);
5583 /* Otherwise we can implement this operation by emitting a barrier
5584 followed by a store of zero. */
5585 expand_builtin_synchronize ();
5586 emit_move_insn (mem, val);
5589 /* Expand an expression EXP that calls a built-in function,
5590 with result going to TARGET if that's convenient
5591 (and in mode MODE if that's convenient).
5592 SUBTARGET may be used as the target for computing one of EXP's operands.
5593 IGNORE is nonzero if the value is to be ignored. */
5596 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5599 tree fndecl = get_callee_fndecl (exp);
5600 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5601 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5603 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5604 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5606 /* When not optimizing, generate calls to library functions for a certain
5609 && !called_as_built_in (fndecl)
5610 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5611 && fcode != BUILT_IN_ALLOCA
5612 && fcode != BUILT_IN_FREE)
5613 return expand_call (exp, target, ignore);
5615 /* The built-in function expanders test for target == const0_rtx
5616 to determine whether the function's result will be ignored. */
5618 target = const0_rtx;
5620 /* If the result of a pure or const built-in function is ignored, and
5621 none of its arguments are volatile, we can avoid expanding the
5622 built-in call and just evaluate the arguments for side-effects. */
5623 if (target == const0_rtx
5624 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5626 bool volatilep = false;
5628 call_expr_arg_iterator iter;
5630 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5631 if (TREE_THIS_VOLATILE (arg))
5639 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5640 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5647 CASE_FLT_FN (BUILT_IN_FABS):
5648 target = expand_builtin_fabs (exp, target, subtarget);
5653 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5654 target = expand_builtin_copysign (exp, target, subtarget);
5659 /* Just do a normal library call if we were unable to fold
5661 CASE_FLT_FN (BUILT_IN_CABS):
5664 CASE_FLT_FN (BUILT_IN_EXP):
5665 CASE_FLT_FN (BUILT_IN_EXP10):
5666 CASE_FLT_FN (BUILT_IN_POW10):
5667 CASE_FLT_FN (BUILT_IN_EXP2):
5668 CASE_FLT_FN (BUILT_IN_EXPM1):
5669 CASE_FLT_FN (BUILT_IN_LOGB):
5670 CASE_FLT_FN (BUILT_IN_LOG):
5671 CASE_FLT_FN (BUILT_IN_LOG10):
5672 CASE_FLT_FN (BUILT_IN_LOG2):
5673 CASE_FLT_FN (BUILT_IN_LOG1P):
5674 CASE_FLT_FN (BUILT_IN_TAN):
5675 CASE_FLT_FN (BUILT_IN_ASIN):
5676 CASE_FLT_FN (BUILT_IN_ACOS):
5677 CASE_FLT_FN (BUILT_IN_ATAN):
5678 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5679 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5680 because of possible accuracy problems. */
5681 if (! flag_unsafe_math_optimizations)
5683 CASE_FLT_FN (BUILT_IN_SQRT):
5684 CASE_FLT_FN (BUILT_IN_FLOOR):
5685 CASE_FLT_FN (BUILT_IN_CEIL):
5686 CASE_FLT_FN (BUILT_IN_TRUNC):
5687 CASE_FLT_FN (BUILT_IN_ROUND):
5688 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5689 CASE_FLT_FN (BUILT_IN_RINT):
5690 target = expand_builtin_mathfn (exp, target, subtarget);
5695 CASE_FLT_FN (BUILT_IN_ILOGB):
5696 if (! flag_unsafe_math_optimizations)
5698 CASE_FLT_FN (BUILT_IN_ISINF):
5699 CASE_FLT_FN (BUILT_IN_FINITE):
5700 case BUILT_IN_ISFINITE:
5701 case BUILT_IN_ISNORMAL:
5702 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5707 CASE_FLT_FN (BUILT_IN_LCEIL):
5708 CASE_FLT_FN (BUILT_IN_LLCEIL):
5709 CASE_FLT_FN (BUILT_IN_LFLOOR):
5710 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5711 target = expand_builtin_int_roundingfn (exp, target);
5716 CASE_FLT_FN (BUILT_IN_LRINT):
5717 CASE_FLT_FN (BUILT_IN_LLRINT):
5718 CASE_FLT_FN (BUILT_IN_LROUND):
5719 CASE_FLT_FN (BUILT_IN_LLROUND):
5720 target = expand_builtin_int_roundingfn_2 (exp, target);
5725 CASE_FLT_FN (BUILT_IN_POW):
5726 target = expand_builtin_pow (exp, target, subtarget);
5731 CASE_FLT_FN (BUILT_IN_POWI):
5732 target = expand_builtin_powi (exp, target, subtarget);
5737 CASE_FLT_FN (BUILT_IN_ATAN2):
5738 CASE_FLT_FN (BUILT_IN_LDEXP):
5739 CASE_FLT_FN (BUILT_IN_SCALB):
5740 CASE_FLT_FN (BUILT_IN_SCALBN):
5741 CASE_FLT_FN (BUILT_IN_SCALBLN):
5742 if (! flag_unsafe_math_optimizations)
5745 CASE_FLT_FN (BUILT_IN_FMOD):
5746 CASE_FLT_FN (BUILT_IN_REMAINDER):
5747 CASE_FLT_FN (BUILT_IN_DREM):
5748 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5753 CASE_FLT_FN (BUILT_IN_CEXPI):
5754 target = expand_builtin_cexpi (exp, target, subtarget);
5755 gcc_assert (target);
5758 CASE_FLT_FN (BUILT_IN_SIN):
5759 CASE_FLT_FN (BUILT_IN_COS):
5760 if (! flag_unsafe_math_optimizations)
5762 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5767 CASE_FLT_FN (BUILT_IN_SINCOS):
5768 if (! flag_unsafe_math_optimizations)
5770 target = expand_builtin_sincos (exp);
5775 case BUILT_IN_APPLY_ARGS:
5776 return expand_builtin_apply_args ();
5778 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5779 FUNCTION with a copy of the parameters described by
5780 ARGUMENTS, and ARGSIZE. It returns a block of memory
5781 allocated on the stack into which is stored all the registers
5782 that might possibly be used for returning the result of a
5783 function. ARGUMENTS is the value returned by
5784 __builtin_apply_args. ARGSIZE is the number of bytes of
5785 arguments that must be copied. ??? How should this value be
5786 computed? We'll also need a safe worst case value for varargs
5788 case BUILT_IN_APPLY:
5789 if (!validate_arglist (exp, POINTER_TYPE,
5790 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5791 && !validate_arglist (exp, REFERENCE_TYPE,
5792 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5798 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5799 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5800 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5802 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5805 /* __builtin_return (RESULT) causes the function to return the
5806 value described by RESULT. RESULT is address of the block of
5807 memory returned by __builtin_apply. */
5808 case BUILT_IN_RETURN:
5809 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5810 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5813 case BUILT_IN_SAVEREGS:
5814 return expand_builtin_saveregs ();
5816 case BUILT_IN_ARGS_INFO:
5817 return expand_builtin_args_info (exp);
5819 case BUILT_IN_VA_ARG_PACK:
5820 /* All valid uses of __builtin_va_arg_pack () are removed during
5822 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5825 case BUILT_IN_VA_ARG_PACK_LEN:
5826 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5828 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5831 /* Return the address of the first anonymous stack arg. */
5832 case BUILT_IN_NEXT_ARG:
5833 if (fold_builtin_next_arg (exp, false))
5835 return expand_builtin_next_arg ();
5837 case BUILT_IN_CLEAR_CACHE:
5838 target = expand_builtin___clear_cache (exp);
5843 case BUILT_IN_CLASSIFY_TYPE:
5844 return expand_builtin_classify_type (exp);
5846 case BUILT_IN_CONSTANT_P:
5849 case BUILT_IN_FRAME_ADDRESS:
5850 case BUILT_IN_RETURN_ADDRESS:
5851 return expand_builtin_frame_address (fndecl, exp);
5853 /* Returns the address of the area where the structure is returned.
5855 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5856 if (call_expr_nargs (exp) != 0
5857 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5858 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5861 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5863 case BUILT_IN_ALLOCA:
5864 target = expand_builtin_alloca (exp, target);
5869 case BUILT_IN_STACK_SAVE:
5870 return expand_stack_save ();
5872 case BUILT_IN_STACK_RESTORE:
5873 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5876 case BUILT_IN_BSWAP32:
5877 case BUILT_IN_BSWAP64:
5878 target = expand_builtin_bswap (exp, target, subtarget);
5884 CASE_INT_FN (BUILT_IN_FFS):
5885 case BUILT_IN_FFSIMAX:
5886 target = expand_builtin_unop (target_mode, exp, target,
5887 subtarget, ffs_optab);
5892 CASE_INT_FN (BUILT_IN_CLZ):
5893 case BUILT_IN_CLZIMAX:
5894 target = expand_builtin_unop (target_mode, exp, target,
5895 subtarget, clz_optab);
5900 CASE_INT_FN (BUILT_IN_CTZ):
5901 case BUILT_IN_CTZIMAX:
5902 target = expand_builtin_unop (target_mode, exp, target,
5903 subtarget, ctz_optab);
5908 CASE_INT_FN (BUILT_IN_POPCOUNT):
5909 case BUILT_IN_POPCOUNTIMAX:
5910 target = expand_builtin_unop (target_mode, exp, target,
5911 subtarget, popcount_optab);
5916 CASE_INT_FN (BUILT_IN_PARITY):
5917 case BUILT_IN_PARITYIMAX:
5918 target = expand_builtin_unop (target_mode, exp, target,
5919 subtarget, parity_optab);
5924 case BUILT_IN_STRLEN:
5925 target = expand_builtin_strlen (exp, target, target_mode);
5930 case BUILT_IN_STRCPY:
5931 target = expand_builtin_strcpy (exp, target);
5936 case BUILT_IN_STRNCPY:
5937 target = expand_builtin_strncpy (exp, target);
5942 case BUILT_IN_STPCPY:
5943 target = expand_builtin_stpcpy (exp, target, mode);
5948 case BUILT_IN_MEMCPY:
5949 target = expand_builtin_memcpy (exp, target);
5954 case BUILT_IN_MEMPCPY:
5955 target = expand_builtin_mempcpy (exp, target, mode);
5960 case BUILT_IN_MEMSET:
5961 target = expand_builtin_memset (exp, target, mode);
5966 case BUILT_IN_BZERO:
5967 target = expand_builtin_bzero (exp);
5972 case BUILT_IN_STRCMP:
5973 target = expand_builtin_strcmp (exp, target);
5978 case BUILT_IN_STRNCMP:
5979 target = expand_builtin_strncmp (exp, target, mode);
5985 case BUILT_IN_MEMCMP:
5986 target = expand_builtin_memcmp (exp, target, mode);
5991 case BUILT_IN_SETJMP:
5992 /* This should have been lowered to the builtins below. */
5995 case BUILT_IN_SETJMP_SETUP:
5996 /* __builtin_setjmp_setup is passed a pointer to an array of five words
5997 and the receiver label. */
5998 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6000 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6001 VOIDmode, EXPAND_NORMAL);
6002 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6003 rtx label_r = label_rtx (label);
6005 /* This is copied from the handling of non-local gotos. */
6006 expand_builtin_setjmp_setup (buf_addr, label_r);
6007 nonlocal_goto_handler_labels
6008 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6009 nonlocal_goto_handler_labels);
6010 /* ??? Do not let expand_label treat us as such since we would
6011 not want to be both on the list of non-local labels and on
6012 the list of forced labels. */
6013 FORCED_LABEL (label) = 0;
6018 case BUILT_IN_SETJMP_DISPATCHER:
6019 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6020 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6022 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6023 rtx label_r = label_rtx (label);
6025 /* Remove the dispatcher label from the list of non-local labels
6026 since the receiver labels have been added to it above. */
6027 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6032 case BUILT_IN_SETJMP_RECEIVER:
6033 /* __builtin_setjmp_receiver is passed the receiver label. */
6034 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6036 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6037 rtx label_r = label_rtx (label);
6039 expand_builtin_setjmp_receiver (label_r);
6044 /* __builtin_longjmp is passed a pointer to an array of five words.
6045 It's similar to the C library longjmp function but works with
6046 __builtin_setjmp above. */
6047 case BUILT_IN_LONGJMP:
6048 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6050 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6051 VOIDmode, EXPAND_NORMAL);
6052 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6054 if (value != const1_rtx)
6056 error ("%<__builtin_longjmp%> second argument must be 1");
6060 expand_builtin_longjmp (buf_addr, value);
6065 case BUILT_IN_NONLOCAL_GOTO:
6066 target = expand_builtin_nonlocal_goto (exp);
6071 /* This updates the setjmp buffer that is its argument with the value
6072 of the current stack pointer. */
6073 case BUILT_IN_UPDATE_SETJMP_BUF:
6074 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6077 = expand_normal (CALL_EXPR_ARG (exp, 0));
6079 expand_builtin_update_setjmp_buf (buf_addr);
6085 expand_builtin_trap ();
6088 case BUILT_IN_UNREACHABLE:
6089 expand_builtin_unreachable ();
6092 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6093 case BUILT_IN_SIGNBITD32:
6094 case BUILT_IN_SIGNBITD64:
6095 case BUILT_IN_SIGNBITD128:
6096 target = expand_builtin_signbit (exp, target);
6101 /* Various hooks for the DWARF 2 __throw routine. */
6102 case BUILT_IN_UNWIND_INIT:
6103 expand_builtin_unwind_init ();
6105 case BUILT_IN_DWARF_CFA:
6106 return virtual_cfa_rtx;
6107 #ifdef DWARF2_UNWIND_INFO
6108 case BUILT_IN_DWARF_SP_COLUMN:
6109 return expand_builtin_dwarf_sp_column ();
6110 case BUILT_IN_INIT_DWARF_REG_SIZES:
6111 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6114 case BUILT_IN_FROB_RETURN_ADDR:
6115 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6116 case BUILT_IN_EXTRACT_RETURN_ADDR:
6117 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6118 case BUILT_IN_EH_RETURN:
6119 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6120 CALL_EXPR_ARG (exp, 1));
6122 #ifdef EH_RETURN_DATA_REGNO
6123 case BUILT_IN_EH_RETURN_DATA_REGNO:
6124 return expand_builtin_eh_return_data_regno (exp);
6126 case BUILT_IN_EXTEND_POINTER:
6127 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6128 case BUILT_IN_EH_POINTER:
6129 return expand_builtin_eh_pointer (exp);
6130 case BUILT_IN_EH_FILTER:
6131 return expand_builtin_eh_filter (exp);
6132 case BUILT_IN_EH_COPY_VALUES:
6133 return expand_builtin_eh_copy_values (exp);
6135 case BUILT_IN_VA_START:
6136 return expand_builtin_va_start (exp);
6137 case BUILT_IN_VA_END:
6138 return expand_builtin_va_end (exp);
6139 case BUILT_IN_VA_COPY:
6140 return expand_builtin_va_copy (exp);
6141 case BUILT_IN_EXPECT:
6142 return expand_builtin_expect (exp, target);
6143 case BUILT_IN_PREFETCH:
6144 expand_builtin_prefetch (exp);
6147 case BUILT_IN_PROFILE_FUNC_ENTER:
6148 return expand_builtin_profile_func (false);
6149 case BUILT_IN_PROFILE_FUNC_EXIT:
6150 return expand_builtin_profile_func (true);
6152 case BUILT_IN_INIT_TRAMPOLINE:
6153 return expand_builtin_init_trampoline (exp);
6154 case BUILT_IN_ADJUST_TRAMPOLINE:
6155 return expand_builtin_adjust_trampoline (exp);
6158 case BUILT_IN_EXECL:
6159 case BUILT_IN_EXECV:
6160 case BUILT_IN_EXECLP:
6161 case BUILT_IN_EXECLE:
6162 case BUILT_IN_EXECVP:
6163 case BUILT_IN_EXECVE:
6164 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6169 case BUILT_IN_FETCH_AND_ADD_1:
6170 case BUILT_IN_FETCH_AND_ADD_2:
6171 case BUILT_IN_FETCH_AND_ADD_4:
6172 case BUILT_IN_FETCH_AND_ADD_8:
6173 case BUILT_IN_FETCH_AND_ADD_16:
6174 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6175 target = expand_builtin_sync_operation (mode, exp, PLUS,
6176 false, target, ignore);
6181 case BUILT_IN_FETCH_AND_SUB_1:
6182 case BUILT_IN_FETCH_AND_SUB_2:
6183 case BUILT_IN_FETCH_AND_SUB_4:
6184 case BUILT_IN_FETCH_AND_SUB_8:
6185 case BUILT_IN_FETCH_AND_SUB_16:
6186 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6187 target = expand_builtin_sync_operation (mode, exp, MINUS,
6188 false, target, ignore);
6193 case BUILT_IN_FETCH_AND_OR_1:
6194 case BUILT_IN_FETCH_AND_OR_2:
6195 case BUILT_IN_FETCH_AND_OR_4:
6196 case BUILT_IN_FETCH_AND_OR_8:
6197 case BUILT_IN_FETCH_AND_OR_16:
6198 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6199 target = expand_builtin_sync_operation (mode, exp, IOR,
6200 false, target, ignore);
6205 case BUILT_IN_FETCH_AND_AND_1:
6206 case BUILT_IN_FETCH_AND_AND_2:
6207 case BUILT_IN_FETCH_AND_AND_4:
6208 case BUILT_IN_FETCH_AND_AND_8:
6209 case BUILT_IN_FETCH_AND_AND_16:
6210 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6211 target = expand_builtin_sync_operation (mode, exp, AND,
6212 false, target, ignore);
6217 case BUILT_IN_FETCH_AND_XOR_1:
6218 case BUILT_IN_FETCH_AND_XOR_2:
6219 case BUILT_IN_FETCH_AND_XOR_4:
6220 case BUILT_IN_FETCH_AND_XOR_8:
6221 case BUILT_IN_FETCH_AND_XOR_16:
6222 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6223 target = expand_builtin_sync_operation (mode, exp, XOR,
6224 false, target, ignore);
6229 case BUILT_IN_FETCH_AND_NAND_1:
6230 case BUILT_IN_FETCH_AND_NAND_2:
6231 case BUILT_IN_FETCH_AND_NAND_4:
6232 case BUILT_IN_FETCH_AND_NAND_8:
6233 case BUILT_IN_FETCH_AND_NAND_16:
6234 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6235 target = expand_builtin_sync_operation (mode, exp, NOT,
6236 false, target, ignore);
6241 case BUILT_IN_ADD_AND_FETCH_1:
6242 case BUILT_IN_ADD_AND_FETCH_2:
6243 case BUILT_IN_ADD_AND_FETCH_4:
6244 case BUILT_IN_ADD_AND_FETCH_8:
6245 case BUILT_IN_ADD_AND_FETCH_16:
6246 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6247 target = expand_builtin_sync_operation (mode, exp, PLUS,
6248 true, target, ignore);
6253 case BUILT_IN_SUB_AND_FETCH_1:
6254 case BUILT_IN_SUB_AND_FETCH_2:
6255 case BUILT_IN_SUB_AND_FETCH_4:
6256 case BUILT_IN_SUB_AND_FETCH_8:
6257 case BUILT_IN_SUB_AND_FETCH_16:
6258 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6259 target = expand_builtin_sync_operation (mode, exp, MINUS,
6260 true, target, ignore);
6265 case BUILT_IN_OR_AND_FETCH_1:
6266 case BUILT_IN_OR_AND_FETCH_2:
6267 case BUILT_IN_OR_AND_FETCH_4:
6268 case BUILT_IN_OR_AND_FETCH_8:
6269 case BUILT_IN_OR_AND_FETCH_16:
6270 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6271 target = expand_builtin_sync_operation (mode, exp, IOR,
6272 true, target, ignore);
6277 case BUILT_IN_AND_AND_FETCH_1:
6278 case BUILT_IN_AND_AND_FETCH_2:
6279 case BUILT_IN_AND_AND_FETCH_4:
6280 case BUILT_IN_AND_AND_FETCH_8:
6281 case BUILT_IN_AND_AND_FETCH_16:
6282 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6283 target = expand_builtin_sync_operation (mode, exp, AND,
6284 true, target, ignore);
6289 case BUILT_IN_XOR_AND_FETCH_1:
6290 case BUILT_IN_XOR_AND_FETCH_2:
6291 case BUILT_IN_XOR_AND_FETCH_4:
6292 case BUILT_IN_XOR_AND_FETCH_8:
6293 case BUILT_IN_XOR_AND_FETCH_16:
6294 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6295 target = expand_builtin_sync_operation (mode, exp, XOR,
6296 true, target, ignore);
6301 case BUILT_IN_NAND_AND_FETCH_1:
6302 case BUILT_IN_NAND_AND_FETCH_2:
6303 case BUILT_IN_NAND_AND_FETCH_4:
6304 case BUILT_IN_NAND_AND_FETCH_8:
6305 case BUILT_IN_NAND_AND_FETCH_16:
6306 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6307 target = expand_builtin_sync_operation (mode, exp, NOT,
6308 true, target, ignore);
6313 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6314 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6315 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6316 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6317 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6318 if (mode == VOIDmode)
6319 mode = TYPE_MODE (boolean_type_node);
6320 if (!target || !register_operand (target, mode))
6321 target = gen_reg_rtx (mode);
6323 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6324 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6329 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6330 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6331 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6332 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6333 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6334 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6335 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6340 case BUILT_IN_LOCK_TEST_AND_SET_1:
6341 case BUILT_IN_LOCK_TEST_AND_SET_2:
6342 case BUILT_IN_LOCK_TEST_AND_SET_4:
6343 case BUILT_IN_LOCK_TEST_AND_SET_8:
6344 case BUILT_IN_LOCK_TEST_AND_SET_16:
6345 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6346 target = expand_builtin_lock_test_and_set (mode, exp, target);
6351 case BUILT_IN_LOCK_RELEASE_1:
6352 case BUILT_IN_LOCK_RELEASE_2:
6353 case BUILT_IN_LOCK_RELEASE_4:
6354 case BUILT_IN_LOCK_RELEASE_8:
6355 case BUILT_IN_LOCK_RELEASE_16:
6356 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6357 expand_builtin_lock_release (mode, exp);
6360 case BUILT_IN_SYNCHRONIZE:
6361 expand_builtin_synchronize ();
6364 case BUILT_IN_OBJECT_SIZE:
6365 return expand_builtin_object_size (exp);
6367 case BUILT_IN_MEMCPY_CHK:
6368 case BUILT_IN_MEMPCPY_CHK:
6369 case BUILT_IN_MEMMOVE_CHK:
6370 case BUILT_IN_MEMSET_CHK:
6371 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6376 case BUILT_IN_STRCPY_CHK:
6377 case BUILT_IN_STPCPY_CHK:
6378 case BUILT_IN_STRNCPY_CHK:
6379 case BUILT_IN_STRCAT_CHK:
6380 case BUILT_IN_STRNCAT_CHK:
6381 case BUILT_IN_SNPRINTF_CHK:
6382 case BUILT_IN_VSNPRINTF_CHK:
6383 maybe_emit_chk_warning (exp, fcode);
6386 case BUILT_IN_SPRINTF_CHK:
6387 case BUILT_IN_VSPRINTF_CHK:
6388 maybe_emit_sprintf_chk_warning (exp, fcode);
6392 maybe_emit_free_warning (exp);
6395 default: /* just do library call, if unknown builtin */
6399 /* The switch statement above can drop through to cause the function
6400 to be called normally. */
6401 return expand_call (exp, target, ignore);
6404 /* Determine whether a tree node represents a call to a built-in
6405 function. If the tree T is a call to a built-in function with
6406 the right number of arguments of the appropriate types, return
6407 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6408 Otherwise the return value is END_BUILTINS. */
6410 enum built_in_function
6411 builtin_mathfn_code (const_tree t)
6413 const_tree fndecl, arg, parmlist;
6414 const_tree argtype, parmtype;
6415 const_call_expr_arg_iterator iter;
6417 if (TREE_CODE (t) != CALL_EXPR
6418 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6419 return END_BUILTINS;
6421 fndecl = get_callee_fndecl (t);
6422 if (fndecl == NULL_TREE
6423 || TREE_CODE (fndecl) != FUNCTION_DECL
6424 || ! DECL_BUILT_IN (fndecl)
6425 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6426 return END_BUILTINS;
6428 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6429 init_const_call_expr_arg_iterator (t, &iter);
6430 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6432 /* If a function doesn't take a variable number of arguments,
6433 the last element in the list will have type `void'. */
6434 parmtype = TREE_VALUE (parmlist);
6435 if (VOID_TYPE_P (parmtype))
6437 if (more_const_call_expr_args_p (&iter))
6438 return END_BUILTINS;
6439 return DECL_FUNCTION_CODE (fndecl);
6442 if (! more_const_call_expr_args_p (&iter))
6443 return END_BUILTINS;
6445 arg = next_const_call_expr_arg (&iter);
6446 argtype = TREE_TYPE (arg);
6448 if (SCALAR_FLOAT_TYPE_P (parmtype))
6450 if (! SCALAR_FLOAT_TYPE_P (argtype))
6451 return END_BUILTINS;
6453 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6455 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6456 return END_BUILTINS;
6458 else if (POINTER_TYPE_P (parmtype))
6460 if (! POINTER_TYPE_P (argtype))
6461 return END_BUILTINS;
6463 else if (INTEGRAL_TYPE_P (parmtype))
6465 if (! INTEGRAL_TYPE_P (argtype))
6466 return END_BUILTINS;
6469 return END_BUILTINS;
6472 /* Variable-length argument list. */
6473 return DECL_FUNCTION_CODE (fndecl);
6476 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6477 evaluate to a constant. */
6480 fold_builtin_constant_p (tree arg)
6482 /* We return 1 for a numeric type that's known to be a constant
6483 value at compile-time or for an aggregate type that's a
6484 literal constant. */
6487 /* If we know this is a constant, emit the constant of one. */
6488 if (CONSTANT_CLASS_P (arg)
6489 || (TREE_CODE (arg) == CONSTRUCTOR
6490 && TREE_CONSTANT (arg)))
6491 return integer_one_node;
6492 if (TREE_CODE (arg) == ADDR_EXPR)
6494 tree op = TREE_OPERAND (arg, 0);
6495 if (TREE_CODE (op) == STRING_CST
6496 || (TREE_CODE (op) == ARRAY_REF
6497 && integer_zerop (TREE_OPERAND (op, 1))
6498 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6499 return integer_one_node;
6502 /* If this expression has side effects, show we don't know it to be a
6503 constant. Likewise if it's a pointer or aggregate type since in
6504 those case we only want literals, since those are only optimized
6505 when generating RTL, not later.
6506 And finally, if we are compiling an initializer, not code, we
6507 need to return a definite result now; there's not going to be any
6508 more optimization done. */
6509 if (TREE_SIDE_EFFECTS (arg)
6510 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6511 || POINTER_TYPE_P (TREE_TYPE (arg))
6513 || folding_initializer)
6514 return integer_zero_node;
6519 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6520 return it as a truthvalue. */
6523 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6525 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6527 fn = built_in_decls[BUILT_IN_EXPECT];
6528 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6529 ret_type = TREE_TYPE (TREE_TYPE (fn));
6530 pred_type = TREE_VALUE (arg_types);
6531 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6533 pred = fold_convert_loc (loc, pred_type, pred);
6534 expected = fold_convert_loc (loc, expected_type, expected);
6535 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6537 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6538 build_int_cst (ret_type, 0));
6541 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6542 NULL_TREE if no simplification is possible. */
6545 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6548 enum tree_code code;
6550 /* If this is a builtin_expect within a builtin_expect keep the
6551 inner one. See through a comparison against a constant. It
6552 might have been added to create a thruthvalue. */
6554 if (COMPARISON_CLASS_P (inner)
6555 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6556 inner = TREE_OPERAND (inner, 0);
6558 if (TREE_CODE (inner) == CALL_EXPR
6559 && (fndecl = get_callee_fndecl (inner))
6560 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6561 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6564 /* Distribute the expected value over short-circuiting operators.
6565 See through the cast from truthvalue_type_node to long. */
6567 while (TREE_CODE (inner) == NOP_EXPR
6568 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6569 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6570 inner = TREE_OPERAND (inner, 0);
6572 code = TREE_CODE (inner);
6573 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6575 tree op0 = TREE_OPERAND (inner, 0);
6576 tree op1 = TREE_OPERAND (inner, 1);
6578 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6579 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6580 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6582 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6585 /* If the argument isn't invariant then there's nothing else we can do. */
6586 if (!TREE_CONSTANT (arg0))
6589 /* If we expect that a comparison against the argument will fold to
6590 a constant return the constant. In practice, this means a true
6591 constant or the address of a non-weak symbol. */
6594 if (TREE_CODE (inner) == ADDR_EXPR)
6598 inner = TREE_OPERAND (inner, 0);
6600 while (TREE_CODE (inner) == COMPONENT_REF
6601 || TREE_CODE (inner) == ARRAY_REF);
6602 if ((TREE_CODE (inner) == VAR_DECL
6603 || TREE_CODE (inner) == FUNCTION_DECL)
6604 && DECL_WEAK (inner))
6608 /* Otherwise, ARG0 already has the proper type for the return value. */
6612 /* Fold a call to __builtin_classify_type with argument ARG. */
6615 fold_builtin_classify_type (tree arg)
6618 return build_int_cst (NULL_TREE, no_type_class);
6620 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6623 /* Fold a call to __builtin_strlen with argument ARG. */
6626 fold_builtin_strlen (location_t loc, tree type, tree arg)
6628 if (!validate_arg (arg, POINTER_TYPE))
6632 tree len = c_strlen (arg, 0);
6635 return fold_convert_loc (loc, type, len);
6641 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6644 fold_builtin_inf (location_t loc, tree type, int warn)
6646 REAL_VALUE_TYPE real;
6648 /* __builtin_inff is intended to be usable to define INFINITY on all
6649 targets. If an infinity is not available, INFINITY expands "to a
6650 positive constant of type float that overflows at translation
6651 time", footnote "In this case, using INFINITY will violate the
6652 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6653 Thus we pedwarn to ensure this constraint violation is
6655 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6656 pedwarn (loc, 0, "target format does not support infinity");
6659 return build_real (type, real);
6662 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6665 fold_builtin_nan (tree arg, tree type, int quiet)
6667 REAL_VALUE_TYPE real;
6670 if (!validate_arg (arg, POINTER_TYPE))
6672 str = c_getstr (arg);
6676 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6679 return build_real (type, real);
6682 /* Return true if the floating point expression T has an integer value.
6683 We also allow +Inf, -Inf and NaN to be considered integer values. */
6686 integer_valued_real_p (tree t)
6688 switch (TREE_CODE (t))
6695 return integer_valued_real_p (TREE_OPERAND (t, 0));
6700 return integer_valued_real_p (TREE_OPERAND (t, 1));
6707 return integer_valued_real_p (TREE_OPERAND (t, 0))
6708 && integer_valued_real_p (TREE_OPERAND (t, 1));
6711 return integer_valued_real_p (TREE_OPERAND (t, 1))
6712 && integer_valued_real_p (TREE_OPERAND (t, 2));
6715 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6719 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6720 if (TREE_CODE (type) == INTEGER_TYPE)
6722 if (TREE_CODE (type) == REAL_TYPE)
6723 return integer_valued_real_p (TREE_OPERAND (t, 0));
6728 switch (builtin_mathfn_code (t))
6730 CASE_FLT_FN (BUILT_IN_CEIL):
6731 CASE_FLT_FN (BUILT_IN_FLOOR):
6732 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6733 CASE_FLT_FN (BUILT_IN_RINT):
6734 CASE_FLT_FN (BUILT_IN_ROUND):
6735 CASE_FLT_FN (BUILT_IN_TRUNC):
6738 CASE_FLT_FN (BUILT_IN_FMIN):
6739 CASE_FLT_FN (BUILT_IN_FMAX):
6740 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6741 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6754 /* FNDECL is assumed to be a builtin where truncation can be propagated
6755 across (for instance floor((double)f) == (double)floorf (f).
6756 Do the transformation for a call with argument ARG. */
6759 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6761 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6763 if (!validate_arg (arg, REAL_TYPE))
6766 /* Integer rounding functions are idempotent. */
6767 if (fcode == builtin_mathfn_code (arg))
6770 /* If argument is already integer valued, and we don't need to worry
6771 about setting errno, there's no need to perform rounding. */
6772 if (! flag_errno_math && integer_valued_real_p (arg))
6777 tree arg0 = strip_float_extensions (arg);
6778 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6779 tree newtype = TREE_TYPE (arg0);
6782 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6783 && (decl = mathfn_built_in (newtype, fcode)))
6784 return fold_convert_loc (loc, ftype,
6785 build_call_expr_loc (loc, decl, 1,
6786 fold_convert_loc (loc,
6793 /* FNDECL is assumed to be builtin which can narrow the FP type of
6794 the argument, for instance lround((double)f) -> lroundf (f).
6795 Do the transformation for a call with argument ARG. */
6798 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6800 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6802 if (!validate_arg (arg, REAL_TYPE))
6805 /* If argument is already integer valued, and we don't need to worry
6806 about setting errno, there's no need to perform rounding. */
6807 if (! flag_errno_math && integer_valued_real_p (arg))
6808 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6809 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6813 tree ftype = TREE_TYPE (arg);
6814 tree arg0 = strip_float_extensions (arg);
6815 tree newtype = TREE_TYPE (arg0);
6818 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6819 && (decl = mathfn_built_in (newtype, fcode)))
6820 return build_call_expr_loc (loc, decl, 1,
6821 fold_convert_loc (loc, newtype, arg0));
6824 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6825 sizeof (long long) == sizeof (long). */
6826 if (TYPE_PRECISION (long_long_integer_type_node)
6827 == TYPE_PRECISION (long_integer_type_node))
6829 tree newfn = NULL_TREE;
6832 CASE_FLT_FN (BUILT_IN_LLCEIL):
6833 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6836 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6837 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6840 CASE_FLT_FN (BUILT_IN_LLROUND):
6841 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6844 CASE_FLT_FN (BUILT_IN_LLRINT):
6845 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6854 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6855 return fold_convert_loc (loc,
6856 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6863 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6864 return type. Return NULL_TREE if no simplification can be made. */
6867 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6871 if (!validate_arg (arg, COMPLEX_TYPE)
6872 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6875 /* Calculate the result when the argument is a constant. */
6876 if (TREE_CODE (arg) == COMPLEX_CST
6877 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6881 if (TREE_CODE (arg) == COMPLEX_EXPR)
6883 tree real = TREE_OPERAND (arg, 0);
6884 tree imag = TREE_OPERAND (arg, 1);
6886 /* If either part is zero, cabs is fabs of the other. */
6887 if (real_zerop (real))
6888 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6889 if (real_zerop (imag))
6890 return fold_build1_loc (loc, ABS_EXPR, type, real);
6892 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6893 if (flag_unsafe_math_optimizations
6894 && operand_equal_p (real, imag, OEP_PURE_SAME))
6896 const REAL_VALUE_TYPE sqrt2_trunc
6897 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6899 return fold_build2_loc (loc, MULT_EXPR, type,
6900 fold_build1_loc (loc, ABS_EXPR, type, real),
6901 build_real (type, sqrt2_trunc));
6905 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
6906 if (TREE_CODE (arg) == NEGATE_EXPR
6907 || TREE_CODE (arg) == CONJ_EXPR)
6908 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6910 /* Don't do this when optimizing for size. */
6911 if (flag_unsafe_math_optimizations
6912 && optimize && optimize_function_for_speed_p (cfun))
6914 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6916 if (sqrtfn != NULL_TREE)
6918 tree rpart, ipart, result;
6920 arg = builtin_save_expr (arg);
6922 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6923 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6925 rpart = builtin_save_expr (rpart);
6926 ipart = builtin_save_expr (ipart);
6928 result = fold_build2_loc (loc, PLUS_EXPR, type,
6929 fold_build2_loc (loc, MULT_EXPR, type,
6931 fold_build2_loc (loc, MULT_EXPR, type,
6934 return build_call_expr_loc (loc, sqrtfn, 1, result);
6941 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
6942 Return NULL_TREE if no simplification can be made. */
6945 fold_builtin_sqrt (location_t loc, tree arg, tree type)
6948 enum built_in_function fcode;
6951 if (!validate_arg (arg, REAL_TYPE))
6954 /* Calculate the result when the argument is a constant. */
6955 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
6958 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
6959 fcode = builtin_mathfn_code (arg);
6960 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6962 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6963 arg = fold_build2_loc (loc, MULT_EXPR, type,
6964 CALL_EXPR_ARG (arg, 0),
6965 build_real (type, dconsthalf));
6966 return build_call_expr_loc (loc, expfn, 1, arg);
6969 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
6970 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6972 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6976 tree arg0 = CALL_EXPR_ARG (arg, 0);
6978 /* The inner root was either sqrt or cbrt. */
6979 /* This was a conditional expression but it triggered a bug
6981 REAL_VALUE_TYPE dconstroot;
6982 if (BUILTIN_SQRT_P (fcode))
6983 dconstroot = dconsthalf;
6985 dconstroot = dconst_third ();
6987 /* Adjust for the outer root. */
6988 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6989 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6990 tree_root = build_real (type, dconstroot);
6991 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6995 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
6996 if (flag_unsafe_math_optimizations
6997 && (fcode == BUILT_IN_POW
6998 || fcode == BUILT_IN_POWF
6999 || fcode == BUILT_IN_POWL))
7001 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7002 tree arg0 = CALL_EXPR_ARG (arg, 0);
7003 tree arg1 = CALL_EXPR_ARG (arg, 1);
7005 if (!tree_expr_nonnegative_p (arg0))
7006 arg0 = build1 (ABS_EXPR, type, arg0);
7007 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7008 build_real (type, dconsthalf));
7009 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7015 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7016 Return NULL_TREE if no simplification can be made. */
7019 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7021 const enum built_in_function fcode = builtin_mathfn_code (arg);
7024 if (!validate_arg (arg, REAL_TYPE))
7027 /* Calculate the result when the argument is a constant. */
7028 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7031 if (flag_unsafe_math_optimizations)
7033 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7034 if (BUILTIN_EXPONENT_P (fcode))
7036 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7037 const REAL_VALUE_TYPE third_trunc =
7038 real_value_truncate (TYPE_MODE (type), dconst_third ());
7039 arg = fold_build2_loc (loc, MULT_EXPR, type,
7040 CALL_EXPR_ARG (arg, 0),
7041 build_real (type, third_trunc));
7042 return build_call_expr_loc (loc, expfn, 1, arg);
7045 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7046 if (BUILTIN_SQRT_P (fcode))
7048 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7052 tree arg0 = CALL_EXPR_ARG (arg, 0);
7054 REAL_VALUE_TYPE dconstroot = dconst_third ();
7056 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7057 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7058 tree_root = build_real (type, dconstroot);
7059 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7063 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7064 if (BUILTIN_CBRT_P (fcode))
7066 tree arg0 = CALL_EXPR_ARG (arg, 0);
7067 if (tree_expr_nonnegative_p (arg0))
7069 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7074 REAL_VALUE_TYPE dconstroot;
7076 real_arithmetic (&dconstroot, MULT_EXPR,
7077 dconst_third_ptr (), dconst_third_ptr ());
7078 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7079 tree_root = build_real (type, dconstroot);
7080 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7085 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7086 if (fcode == BUILT_IN_POW
7087 || fcode == BUILT_IN_POWF
7088 || fcode == BUILT_IN_POWL)
7090 tree arg00 = CALL_EXPR_ARG (arg, 0);
7091 tree arg01 = CALL_EXPR_ARG (arg, 1);
7092 if (tree_expr_nonnegative_p (arg00))
7094 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7095 const REAL_VALUE_TYPE dconstroot
7096 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7097 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7098 build_real (type, dconstroot));
7099 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7106 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7107 TYPE is the type of the return value. Return NULL_TREE if no
7108 simplification can be made. */
7111 fold_builtin_cos (location_t loc,
7112 tree arg, tree type, tree fndecl)
7116 if (!validate_arg (arg, REAL_TYPE))
7119 /* Calculate the result when the argument is a constant. */
7120 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7123 /* Optimize cos(-x) into cos (x). */
7124 if ((narg = fold_strip_sign_ops (arg)))
7125 return build_call_expr_loc (loc, fndecl, 1, narg);
7130 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7131 Return NULL_TREE if no simplification can be made. */
7134 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7136 if (validate_arg (arg, REAL_TYPE))
7140 /* Calculate the result when the argument is a constant. */
7141 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7144 /* Optimize cosh(-x) into cosh (x). */
7145 if ((narg = fold_strip_sign_ops (arg)))
7146 return build_call_expr_loc (loc, fndecl, 1, narg);
7152 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7153 argument ARG. TYPE is the type of the return value. Return
7154 NULL_TREE if no simplification can be made. */
7157 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7160 if (validate_arg (arg, COMPLEX_TYPE)
7161 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7165 /* Calculate the result when the argument is a constant. */
7166 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7169 /* Optimize fn(-x) into fn(x). */
7170 if ((tmp = fold_strip_sign_ops (arg)))
7171 return build_call_expr_loc (loc, fndecl, 1, tmp);
7177 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7178 Return NULL_TREE if no simplification can be made. */
7181 fold_builtin_tan (tree arg, tree type)
7183 enum built_in_function fcode;
7186 if (!validate_arg (arg, REAL_TYPE))
7189 /* Calculate the result when the argument is a constant. */
7190 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7193 /* Optimize tan(atan(x)) = x. */
7194 fcode = builtin_mathfn_code (arg);
7195 if (flag_unsafe_math_optimizations
7196 && (fcode == BUILT_IN_ATAN
7197 || fcode == BUILT_IN_ATANF
7198 || fcode == BUILT_IN_ATANL))
7199 return CALL_EXPR_ARG (arg, 0);
7204 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7205 NULL_TREE if no simplification can be made. */
7208 fold_builtin_sincos (location_t loc,
7209 tree arg0, tree arg1, tree arg2)
7214 if (!validate_arg (arg0, REAL_TYPE)
7215 || !validate_arg (arg1, POINTER_TYPE)
7216 || !validate_arg (arg2, POINTER_TYPE))
7219 type = TREE_TYPE (arg0);
7221 /* Calculate the result when the argument is a constant. */
7222 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7225 /* Canonicalize sincos to cexpi. */
7226 if (!TARGET_C99_FUNCTIONS)
7228 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7232 call = build_call_expr_loc (loc, fn, 1, arg0);
7233 call = builtin_save_expr (call);
7235 return build2 (COMPOUND_EXPR, void_type_node,
7236 build2 (MODIFY_EXPR, void_type_node,
7237 build_fold_indirect_ref_loc (loc, arg1),
7238 build1 (IMAGPART_EXPR, type, call)),
7239 build2 (MODIFY_EXPR, void_type_node,
7240 build_fold_indirect_ref_loc (loc, arg2),
7241 build1 (REALPART_EXPR, type, call)));
7244 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7245 NULL_TREE if no simplification can be made. */
7248 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7251 tree realp, imagp, ifn;
7254 if (!validate_arg (arg0, COMPLEX_TYPE)
7255 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7258 /* Calculate the result when the argument is a constant. */
7259 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7262 rtype = TREE_TYPE (TREE_TYPE (arg0));
7264 /* In case we can figure out the real part of arg0 and it is constant zero
7266 if (!TARGET_C99_FUNCTIONS)
7268 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7272 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7273 && real_zerop (realp))
7275 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7276 return build_call_expr_loc (loc, ifn, 1, narg);
7279 /* In case we can easily decompose real and imaginary parts split cexp
7280 to exp (r) * cexpi (i). */
7281 if (flag_unsafe_math_optimizations
7284 tree rfn, rcall, icall;
7286 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7290 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7294 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7295 icall = builtin_save_expr (icall);
7296 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7297 rcall = builtin_save_expr (rcall);
7298 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7299 fold_build2_loc (loc, MULT_EXPR, rtype,
7301 fold_build1_loc (loc, REALPART_EXPR,
7303 fold_build2_loc (loc, MULT_EXPR, rtype,
7305 fold_build1_loc (loc, IMAGPART_EXPR,
7312 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7313 Return NULL_TREE if no simplification can be made. */
7316 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7318 if (!validate_arg (arg, REAL_TYPE))
7321 /* Optimize trunc of constant value. */
7322 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7324 REAL_VALUE_TYPE r, x;
7325 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7327 x = TREE_REAL_CST (arg);
7328 real_trunc (&r, TYPE_MODE (type), &x);
7329 return build_real (type, r);
7332 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7335 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7336 Return NULL_TREE if no simplification can be made. */
7339 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7341 if (!validate_arg (arg, REAL_TYPE))
7344 /* Optimize floor of constant value. */
7345 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7349 x = TREE_REAL_CST (arg);
7350 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7352 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7355 real_floor (&r, TYPE_MODE (type), &x);
7356 return build_real (type, r);
7360 /* Fold floor (x) where x is nonnegative to trunc (x). */
7361 if (tree_expr_nonnegative_p (arg))
7363 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7365 return build_call_expr_loc (loc, truncfn, 1, arg);
7368 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7371 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7372 Return NULL_TREE if no simplification can be made. */
7375 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7377 if (!validate_arg (arg, REAL_TYPE))
7380 /* Optimize ceil of constant value. */
7381 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7385 x = TREE_REAL_CST (arg);
7386 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7388 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7391 real_ceil (&r, TYPE_MODE (type), &x);
7392 return build_real (type, r);
7396 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7399 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7400 Return NULL_TREE if no simplification can be made. */
7403 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7405 if (!validate_arg (arg, REAL_TYPE))
7408 /* Optimize round of constant value. */
7409 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7413 x = TREE_REAL_CST (arg);
7414 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7416 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7419 real_round (&r, TYPE_MODE (type), &x);
7420 return build_real (type, r);
7424 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7427 /* Fold function call to builtin lround, lroundf or lroundl (or the
7428 corresponding long long versions) and other rounding functions. ARG
7429 is the argument to the call. Return NULL_TREE if no simplification
7433 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7435 if (!validate_arg (arg, REAL_TYPE))
7438 /* Optimize lround of constant value. */
7439 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7441 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7443 if (real_isfinite (&x))
7445 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7446 tree ftype = TREE_TYPE (arg);
7447 unsigned HOST_WIDE_INT lo2;
7448 HOST_WIDE_INT hi, lo;
7451 switch (DECL_FUNCTION_CODE (fndecl))
7453 CASE_FLT_FN (BUILT_IN_LFLOOR):
7454 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7455 real_floor (&r, TYPE_MODE (ftype), &x);
7458 CASE_FLT_FN (BUILT_IN_LCEIL):
7459 CASE_FLT_FN (BUILT_IN_LLCEIL):
7460 real_ceil (&r, TYPE_MODE (ftype), &x);
7463 CASE_FLT_FN (BUILT_IN_LROUND):
7464 CASE_FLT_FN (BUILT_IN_LLROUND):
7465 real_round (&r, TYPE_MODE (ftype), &x);
7472 REAL_VALUE_TO_INT (&lo, &hi, r);
7473 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7474 return build_int_cst_wide (itype, lo2, hi);
7478 switch (DECL_FUNCTION_CODE (fndecl))
7480 CASE_FLT_FN (BUILT_IN_LFLOOR):
7481 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7482 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7483 if (tree_expr_nonnegative_p (arg))
7484 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7485 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7490 return fold_fixed_mathfn (loc, fndecl, arg);
7493 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7494 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7495 the argument to the call. Return NULL_TREE if no simplification can
7499 fold_builtin_bitop (tree fndecl, tree arg)
7501 if (!validate_arg (arg, INTEGER_TYPE))
7504 /* Optimize for constant argument. */
7505 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7507 HOST_WIDE_INT hi, width, result;
7508 unsigned HOST_WIDE_INT lo;
7511 type = TREE_TYPE (arg);
7512 width = TYPE_PRECISION (type);
7513 lo = TREE_INT_CST_LOW (arg);
7515 /* Clear all the bits that are beyond the type's precision. */
7516 if (width > HOST_BITS_PER_WIDE_INT)
7518 hi = TREE_INT_CST_HIGH (arg);
7519 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7520 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7525 if (width < HOST_BITS_PER_WIDE_INT)
7526 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7529 switch (DECL_FUNCTION_CODE (fndecl))
7531 CASE_INT_FN (BUILT_IN_FFS):
7533 result = exact_log2 (lo & -lo) + 1;
7535 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7540 CASE_INT_FN (BUILT_IN_CLZ):
7542 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7544 result = width - floor_log2 (lo) - 1;
7545 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7549 CASE_INT_FN (BUILT_IN_CTZ):
7551 result = exact_log2 (lo & -lo);
7553 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7554 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7558 CASE_INT_FN (BUILT_IN_POPCOUNT):
7561 result++, lo &= lo - 1;
7563 result++, hi &= hi - 1;
7566 CASE_INT_FN (BUILT_IN_PARITY):
7569 result++, lo &= lo - 1;
7571 result++, hi &= hi - 1;
7579 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7585 /* Fold function call to builtin_bswap and the long and long long
7586 variants. Return NULL_TREE if no simplification can be made. */
7588 fold_builtin_bswap (tree fndecl, tree arg)
7590 if (! validate_arg (arg, INTEGER_TYPE))
7593 /* Optimize constant value. */
7594 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7596 HOST_WIDE_INT hi, width, r_hi = 0;
7597 unsigned HOST_WIDE_INT lo, r_lo = 0;
7600 type = TREE_TYPE (arg);
7601 width = TYPE_PRECISION (type);
7602 lo = TREE_INT_CST_LOW (arg);
7603 hi = TREE_INT_CST_HIGH (arg);
7605 switch (DECL_FUNCTION_CODE (fndecl))
7607 case BUILT_IN_BSWAP32:
7608 case BUILT_IN_BSWAP64:
7612 for (s = 0; s < width; s += 8)
7614 int d = width - s - 8;
7615 unsigned HOST_WIDE_INT byte;
7617 if (s < HOST_BITS_PER_WIDE_INT)
7618 byte = (lo >> s) & 0xff;
7620 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7622 if (d < HOST_BITS_PER_WIDE_INT)
7625 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7635 if (width < HOST_BITS_PER_WIDE_INT)
7636 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7638 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7644 /* A subroutine of fold_builtin to fold the various logarithmic
7645 functions. Return NULL_TREE if no simplification can me made.
7646 FUNC is the corresponding MPFR logarithm function. */
7649 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7650 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7652 if (validate_arg (arg, REAL_TYPE))
7654 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7656 const enum built_in_function fcode = builtin_mathfn_code (arg);
7658 /* Calculate the result when the argument is a constant. */
7659 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7662 /* Special case, optimize logN(expN(x)) = x. */
7663 if (flag_unsafe_math_optimizations
7664 && ((func == mpfr_log
7665 && (fcode == BUILT_IN_EXP
7666 || fcode == BUILT_IN_EXPF
7667 || fcode == BUILT_IN_EXPL))
7668 || (func == mpfr_log2
7669 && (fcode == BUILT_IN_EXP2
7670 || fcode == BUILT_IN_EXP2F
7671 || fcode == BUILT_IN_EXP2L))
7672 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7673 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7675 /* Optimize logN(func()) for various exponential functions. We
7676 want to determine the value "x" and the power "exponent" in
7677 order to transform logN(x**exponent) into exponent*logN(x). */
7678 if (flag_unsafe_math_optimizations)
7680 tree exponent = 0, x = 0;
7684 CASE_FLT_FN (BUILT_IN_EXP):
7685 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7686 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7688 exponent = CALL_EXPR_ARG (arg, 0);
7690 CASE_FLT_FN (BUILT_IN_EXP2):
7691 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7692 x = build_real (type, dconst2);
7693 exponent = CALL_EXPR_ARG (arg, 0);
7695 CASE_FLT_FN (BUILT_IN_EXP10):
7696 CASE_FLT_FN (BUILT_IN_POW10):
7697 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7699 REAL_VALUE_TYPE dconst10;
7700 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7701 x = build_real (type, dconst10);
7703 exponent = CALL_EXPR_ARG (arg, 0);
7705 CASE_FLT_FN (BUILT_IN_SQRT):
7706 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7707 x = CALL_EXPR_ARG (arg, 0);
7708 exponent = build_real (type, dconsthalf);
7710 CASE_FLT_FN (BUILT_IN_CBRT):
7711 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7712 x = CALL_EXPR_ARG (arg, 0);
7713 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7716 CASE_FLT_FN (BUILT_IN_POW):
7717 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7718 x = CALL_EXPR_ARG (arg, 0);
7719 exponent = CALL_EXPR_ARG (arg, 1);
7725 /* Now perform the optimization. */
7728 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7729 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7737 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7738 NULL_TREE if no simplification can be made. */
7741 fold_builtin_hypot (location_t loc, tree fndecl,
7742 tree arg0, tree arg1, tree type)
7744 tree res, narg0, narg1;
7746 if (!validate_arg (arg0, REAL_TYPE)
7747 || !validate_arg (arg1, REAL_TYPE))
7750 /* Calculate the result when the argument is a constant. */
7751 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7754 /* If either argument to hypot has a negate or abs, strip that off.
7755 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7756 narg0 = fold_strip_sign_ops (arg0);
7757 narg1 = fold_strip_sign_ops (arg1);
7760 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7761 narg1 ? narg1 : arg1);
7764 /* If either argument is zero, hypot is fabs of the other. */
7765 if (real_zerop (arg0))
7766 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7767 else if (real_zerop (arg1))
7768 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7770 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7771 if (flag_unsafe_math_optimizations
7772 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7774 const REAL_VALUE_TYPE sqrt2_trunc
7775 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7776 return fold_build2_loc (loc, MULT_EXPR, type,
7777 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7778 build_real (type, sqrt2_trunc));
7785 /* Fold a builtin function call to pow, powf, or powl. Return
7786 NULL_TREE if no simplification can be made. */
7788 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7792 if (!validate_arg (arg0, REAL_TYPE)
7793 || !validate_arg (arg1, REAL_TYPE))
7796 /* Calculate the result when the argument is a constant. */
7797 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7800 /* Optimize pow(1.0,y) = 1.0. */
7801 if (real_onep (arg0))
7802 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7804 if (TREE_CODE (arg1) == REAL_CST
7805 && !TREE_OVERFLOW (arg1))
7807 REAL_VALUE_TYPE cint;
7811 c = TREE_REAL_CST (arg1);
7813 /* Optimize pow(x,0.0) = 1.0. */
7814 if (REAL_VALUES_EQUAL (c, dconst0))
7815 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7818 /* Optimize pow(x,1.0) = x. */
7819 if (REAL_VALUES_EQUAL (c, dconst1))
7822 /* Optimize pow(x,-1.0) = 1.0/x. */
7823 if (REAL_VALUES_EQUAL (c, dconstm1))
7824 return fold_build2_loc (loc, RDIV_EXPR, type,
7825 build_real (type, dconst1), arg0);
7827 /* Optimize pow(x,0.5) = sqrt(x). */
7828 if (flag_unsafe_math_optimizations
7829 && REAL_VALUES_EQUAL (c, dconsthalf))
7831 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7833 if (sqrtfn != NULL_TREE)
7834 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7837 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7838 if (flag_unsafe_math_optimizations)
7840 const REAL_VALUE_TYPE dconstroot
7841 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7843 if (REAL_VALUES_EQUAL (c, dconstroot))
7845 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7846 if (cbrtfn != NULL_TREE)
7847 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7851 /* Check for an integer exponent. */
7852 n = real_to_integer (&c);
7853 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7854 if (real_identical (&c, &cint))
7856 /* Attempt to evaluate pow at compile-time, unless this should
7857 raise an exception. */
7858 if (TREE_CODE (arg0) == REAL_CST
7859 && !TREE_OVERFLOW (arg0)
7861 || (!flag_trapping_math && !flag_errno_math)
7862 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7867 x = TREE_REAL_CST (arg0);
7868 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7869 if (flag_unsafe_math_optimizations || !inexact)
7870 return build_real (type, x);
7873 /* Strip sign ops from even integer powers. */
7874 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7876 tree narg0 = fold_strip_sign_ops (arg0);
7878 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
7883 if (flag_unsafe_math_optimizations)
7885 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7887 /* Optimize pow(expN(x),y) = expN(x*y). */
7888 if (BUILTIN_EXPONENT_P (fcode))
7890 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7891 tree arg = CALL_EXPR_ARG (arg0, 0);
7892 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7893 return build_call_expr_loc (loc, expfn, 1, arg);
7896 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7897 if (BUILTIN_SQRT_P (fcode))
7899 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7900 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7901 build_real (type, dconsthalf));
7902 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7905 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7906 if (BUILTIN_CBRT_P (fcode))
7908 tree arg = CALL_EXPR_ARG (arg0, 0);
7909 if (tree_expr_nonnegative_p (arg))
7911 const REAL_VALUE_TYPE dconstroot
7912 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7913 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7914 build_real (type, dconstroot));
7915 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7919 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7920 if (fcode == BUILT_IN_POW
7921 || fcode == BUILT_IN_POWF
7922 || fcode == BUILT_IN_POWL)
7924 tree arg00 = CALL_EXPR_ARG (arg0, 0);
7925 if (tree_expr_nonnegative_p (arg00))
7927 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7928 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7929 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7937 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
7938 Return NULL_TREE if no simplification can be made. */
7940 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7941 tree arg0, tree arg1, tree type)
7943 if (!validate_arg (arg0, REAL_TYPE)
7944 || !validate_arg (arg1, INTEGER_TYPE))
7947 /* Optimize pow(1.0,y) = 1.0. */
7948 if (real_onep (arg0))
7949 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7951 if (host_integerp (arg1, 0))
7953 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7955 /* Evaluate powi at compile-time. */
7956 if (TREE_CODE (arg0) == REAL_CST
7957 && !TREE_OVERFLOW (arg0))
7960 x = TREE_REAL_CST (arg0);
7961 real_powi (&x, TYPE_MODE (type), &x, c);
7962 return build_real (type, x);
7965 /* Optimize pow(x,0) = 1.0. */
7967 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7970 /* Optimize pow(x,1) = x. */
7974 /* Optimize pow(x,-1) = 1.0/x. */
7976 return fold_build2_loc (loc, RDIV_EXPR, type,
7977 build_real (type, dconst1), arg0);
7983 /* A subroutine of fold_builtin to fold the various exponent
7984 functions. Return NULL_TREE if no simplification can be made.
7985 FUNC is the corresponding MPFR exponent function. */
7988 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
7989 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7991 if (validate_arg (arg, REAL_TYPE))
7993 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7996 /* Calculate the result when the argument is a constant. */
7997 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8000 /* Optimize expN(logN(x)) = x. */
8001 if (flag_unsafe_math_optimizations)
8003 const enum built_in_function fcode = builtin_mathfn_code (arg);
8005 if ((func == mpfr_exp
8006 && (fcode == BUILT_IN_LOG
8007 || fcode == BUILT_IN_LOGF
8008 || fcode == BUILT_IN_LOGL))
8009 || (func == mpfr_exp2
8010 && (fcode == BUILT_IN_LOG2
8011 || fcode == BUILT_IN_LOG2F
8012 || fcode == BUILT_IN_LOG2L))
8013 || (func == mpfr_exp10
8014 && (fcode == BUILT_IN_LOG10
8015 || fcode == BUILT_IN_LOG10F
8016 || fcode == BUILT_IN_LOG10L)))
8017 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8024 /* Return true if VAR is a VAR_DECL or a component thereof. */
8027 var_decl_component_p (tree var)
8030 while (handled_component_p (inner))
8031 inner = TREE_OPERAND (inner, 0);
8032 return SSA_VAR_P (inner);
8035 /* Fold function call to builtin memset. Return
8036 NULL_TREE if no simplification can be made. */
8039 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8040 tree type, bool ignore)
8042 tree var, ret, etype;
8043 unsigned HOST_WIDE_INT length, cval;
8045 if (! validate_arg (dest, POINTER_TYPE)
8046 || ! validate_arg (c, INTEGER_TYPE)
8047 || ! validate_arg (len, INTEGER_TYPE))
8050 if (! host_integerp (len, 1))
8053 /* If the LEN parameter is zero, return DEST. */
8054 if (integer_zerop (len))
8055 return omit_one_operand_loc (loc, type, dest, c);
8057 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8062 if (TREE_CODE (var) != ADDR_EXPR)
8065 var = TREE_OPERAND (var, 0);
8066 if (TREE_THIS_VOLATILE (var))
8069 etype = TREE_TYPE (var);
8070 if (TREE_CODE (etype) == ARRAY_TYPE)
8071 etype = TREE_TYPE (etype);
8073 if (!INTEGRAL_TYPE_P (etype)
8074 && !POINTER_TYPE_P (etype))
8077 if (! var_decl_component_p (var))
8080 length = tree_low_cst (len, 1);
8081 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8082 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8086 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8089 if (integer_zerop (c))
8093 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8096 cval = tree_low_cst (c, 1);
8100 cval |= (cval << 31) << 1;
8103 ret = build_int_cst_type (etype, cval);
8104 var = build_fold_indirect_ref_loc (loc,
8105 fold_convert_loc (loc,
8106 build_pointer_type (etype),
8108 ret = build2 (MODIFY_EXPR, etype, var, ret);
8112 return omit_one_operand_loc (loc, type, dest, ret);
8115 /* Fold function call to builtin memset. Return
8116 NULL_TREE if no simplification can be made. */
8119 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8121 if (! validate_arg (dest, POINTER_TYPE)
8122 || ! validate_arg (size, INTEGER_TYPE))
8128 /* New argument list transforming bzero(ptr x, int y) to
8129 memset(ptr x, int 0, size_t y). This is done this way
8130 so that if it isn't expanded inline, we fallback to
8131 calling bzero instead of memset. */
8133 return fold_builtin_memset (loc, dest, integer_zero_node,
8134 fold_convert_loc (loc, sizetype, size),
8135 void_type_node, ignore);
8138 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8139 NULL_TREE if no simplification can be made.
8140 If ENDP is 0, return DEST (like memcpy).
8141 If ENDP is 1, return DEST+LEN (like mempcpy).
8142 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8143 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8147 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8148 tree len, tree type, bool ignore, int endp)
8150 tree destvar, srcvar, expr;
8152 if (! validate_arg (dest, POINTER_TYPE)
8153 || ! validate_arg (src, POINTER_TYPE)
8154 || ! validate_arg (len, INTEGER_TYPE))
8157 /* If the LEN parameter is zero, return DEST. */
8158 if (integer_zerop (len))
8159 return omit_one_operand_loc (loc, type, dest, src);
8161 /* If SRC and DEST are the same (and not volatile), return
8162 DEST{,+LEN,+LEN-1}. */
8163 if (operand_equal_p (src, dest, 0))
8167 tree srctype, desttype;
8168 int src_align, dest_align;
8172 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8173 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8175 /* Both DEST and SRC must be pointer types.
8176 ??? This is what old code did. Is the testing for pointer types
8179 If either SRC is readonly or length is 1, we can use memcpy. */
8180 if (!dest_align || !src_align)
8182 if (readonly_data_expr (src)
8183 || (host_integerp (len, 1)
8184 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8185 >= tree_low_cst (len, 1))))
8187 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8190 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8193 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8194 srcvar = build_fold_indirect_ref_loc (loc, src);
8195 destvar = build_fold_indirect_ref_loc (loc, dest);
8197 && !TREE_THIS_VOLATILE (srcvar)
8199 && !TREE_THIS_VOLATILE (destvar))
8201 tree src_base, dest_base, fn;
8202 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8203 HOST_WIDE_INT size = -1;
8204 HOST_WIDE_INT maxsize = -1;
8207 if (handled_component_p (src_base))
8208 src_base = get_ref_base_and_extent (src_base, &src_offset,
8210 dest_base = destvar;
8211 if (handled_component_p (dest_base))
8212 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8214 if (host_integerp (len, 1))
8216 maxsize = tree_low_cst (len, 1);
8218 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8221 maxsize *= BITS_PER_UNIT;
8225 if (SSA_VAR_P (src_base)
8226 && SSA_VAR_P (dest_base))
8228 if (operand_equal_p (src_base, dest_base, 0)
8229 && ranges_overlap_p (src_offset, maxsize,
8230 dest_offset, maxsize))
8233 else if (TREE_CODE (src_base) == INDIRECT_REF
8234 && TREE_CODE (dest_base) == INDIRECT_REF)
8236 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8237 TREE_OPERAND (dest_base, 0), 0)
8238 || ranges_overlap_p (src_offset, maxsize,
8239 dest_offset, maxsize))
8245 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8248 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8253 if (!host_integerp (len, 0))
8256 This logic lose for arguments like (type *)malloc (sizeof (type)),
8257 since we strip the casts of up to VOID return value from malloc.
8258 Perhaps we ought to inherit type from non-VOID argument here? */
8261 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8262 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8264 tree tem = TREE_OPERAND (src, 0);
8266 if (tem != TREE_OPERAND (src, 0))
8267 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8269 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8271 tree tem = TREE_OPERAND (dest, 0);
8273 if (tem != TREE_OPERAND (dest, 0))
8274 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8276 srctype = TREE_TYPE (TREE_TYPE (src));
8278 && TREE_CODE (srctype) == ARRAY_TYPE
8279 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8281 srctype = TREE_TYPE (srctype);
8283 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8285 desttype = TREE_TYPE (TREE_TYPE (dest));
8287 && TREE_CODE (desttype) == ARRAY_TYPE
8288 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8290 desttype = TREE_TYPE (desttype);
8292 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8294 if (!srctype || !desttype
8295 || !TYPE_SIZE_UNIT (srctype)
8296 || !TYPE_SIZE_UNIT (desttype)
8297 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8298 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8299 || TYPE_VOLATILE (srctype)
8300 || TYPE_VOLATILE (desttype))
8303 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8304 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8305 if (dest_align < (int) TYPE_ALIGN (desttype)
8306 || src_align < (int) TYPE_ALIGN (srctype))
8310 dest = builtin_save_expr (dest);
8313 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8315 srcvar = build_fold_indirect_ref_loc (loc, src);
8316 if (TREE_THIS_VOLATILE (srcvar))
8318 else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
8320 /* With memcpy, it is possible to bypass aliasing rules, so without
8321 this check i.e. execute/20060930-2.c would be misoptimized,
8322 because it use conflicting alias set to hold argument for the
8323 memcpy call. This check is probably unnecessary with
8324 -fno-strict-aliasing. Similarly for destvar. See also
8326 else if (!var_decl_component_p (srcvar))
8330 destvar = NULL_TREE;
8331 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8333 destvar = build_fold_indirect_ref_loc (loc, dest);
8334 if (TREE_THIS_VOLATILE (destvar))
8336 else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
8337 destvar = NULL_TREE;
8338 else if (!var_decl_component_p (destvar))
8339 destvar = NULL_TREE;
8342 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8345 if (srcvar == NULL_TREE)
8348 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8351 srctype = build_qualified_type (desttype, 0);
8352 if (src_align < (int) TYPE_ALIGN (srctype))
8354 if (AGGREGATE_TYPE_P (srctype)
8355 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8358 srctype = build_variant_type_copy (srctype);
8359 TYPE_ALIGN (srctype) = src_align;
8360 TYPE_USER_ALIGN (srctype) = 1;
8361 TYPE_PACKED (srctype) = 1;
8363 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8364 src = fold_convert_loc (loc, srcptype, src);
8365 srcvar = build_fold_indirect_ref_loc (loc, src);
8367 else if (destvar == NULL_TREE)
8370 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8373 desttype = build_qualified_type (srctype, 0);
8374 if (dest_align < (int) TYPE_ALIGN (desttype))
8376 if (AGGREGATE_TYPE_P (desttype)
8377 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8380 desttype = build_variant_type_copy (desttype);
8381 TYPE_ALIGN (desttype) = dest_align;
8382 TYPE_USER_ALIGN (desttype) = 1;
8383 TYPE_PACKED (desttype) = 1;
8385 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8386 dest = fold_convert_loc (loc, destptype, dest);
8387 destvar = build_fold_indirect_ref_loc (loc, dest);
8390 if (srctype == desttype
8391 || (gimple_in_ssa_p (cfun)
8392 && useless_type_conversion_p (desttype, srctype)))
8394 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8395 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8396 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8397 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8398 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
8400 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8401 TREE_TYPE (destvar), srcvar);
8402 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8408 if (endp == 0 || endp == 3)
8409 return omit_one_operand_loc (loc, type, dest, expr);
8415 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8418 len = fold_convert_loc (loc, sizetype, len);
8419 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8420 dest = fold_convert_loc (loc, type, dest);
8422 dest = omit_one_operand_loc (loc, type, dest, expr);
8426 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8427 If LEN is not NULL, it represents the length of the string to be
8428 copied. Return NULL_TREE if no simplification can be made. */
8431 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8435 if (!validate_arg (dest, POINTER_TYPE)
8436 || !validate_arg (src, POINTER_TYPE))
8439 /* If SRC and DEST are the same (and not volatile), return DEST. */
8440 if (operand_equal_p (src, dest, 0))
8441 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8443 if (optimize_function_for_size_p (cfun))
8446 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8452 len = c_strlen (src, 1);
8453 if (! len || TREE_SIDE_EFFECTS (len))
8457 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8458 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8459 build_call_expr_loc (loc, fn, 3, dest, src, len));
8462 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8463 Return NULL_TREE if no simplification can be made. */
8466 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8468 tree fn, len, lenp1, call, type;
8470 if (!validate_arg (dest, POINTER_TYPE)
8471 || !validate_arg (src, POINTER_TYPE))
8474 len = c_strlen (src, 1);
8476 || TREE_CODE (len) != INTEGER_CST)
8479 if (optimize_function_for_size_p (cfun)
8480 /* If length is zero it's small enough. */
8481 && !integer_zerop (len))
8484 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8488 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8489 /* We use dest twice in building our expression. Save it from
8490 multiple expansions. */
8491 dest = builtin_save_expr (dest);
8492 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8494 type = TREE_TYPE (TREE_TYPE (fndecl));
8495 len = fold_convert_loc (loc, sizetype, len);
8496 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8497 dest = fold_convert_loc (loc, type, dest);
8498 dest = omit_one_operand_loc (loc, type, dest, call);
8502 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8503 If SLEN is not NULL, it represents the length of the source string.
8504 Return NULL_TREE if no simplification can be made. */
8507 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8508 tree src, tree len, tree slen)
8512 if (!validate_arg (dest, POINTER_TYPE)
8513 || !validate_arg (src, POINTER_TYPE)
8514 || !validate_arg (len, INTEGER_TYPE))
8517 /* If the LEN parameter is zero, return DEST. */
8518 if (integer_zerop (len))
8519 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8521 /* We can't compare slen with len as constants below if len is not a
8523 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8527 slen = c_strlen (src, 1);
8529 /* Now, we must be passed a constant src ptr parameter. */
8530 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8533 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8535 /* We do not support simplification of this case, though we do
8536 support it when expanding trees into RTL. */
8537 /* FIXME: generate a call to __builtin_memset. */
8538 if (tree_int_cst_lt (slen, len))
8541 /* OK transform into builtin memcpy. */
8542 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8545 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8546 build_call_expr_loc (loc, fn, 3, dest, src, len));
8549 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8550 arguments to the call, and TYPE is its return type.
8551 Return NULL_TREE if no simplification can be made. */
8554 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8556 if (!validate_arg (arg1, POINTER_TYPE)
8557 || !validate_arg (arg2, INTEGER_TYPE)
8558 || !validate_arg (len, INTEGER_TYPE))
8564 if (TREE_CODE (arg2) != INTEGER_CST
8565 || !host_integerp (len, 1))
8568 p1 = c_getstr (arg1);
8569 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8575 if (target_char_cast (arg2, &c))
8578 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8581 return build_int_cst (TREE_TYPE (arg1), 0);
8583 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8585 return fold_convert_loc (loc, type, tem);
8591 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8592 Return NULL_TREE if no simplification can be made. */
8595 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8597 const char *p1, *p2;
8599 if (!validate_arg (arg1, POINTER_TYPE)
8600 || !validate_arg (arg2, POINTER_TYPE)
8601 || !validate_arg (len, INTEGER_TYPE))
8604 /* If the LEN parameter is zero, return zero. */
8605 if (integer_zerop (len))
8606 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8609 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8610 if (operand_equal_p (arg1, arg2, 0))
8611 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8613 p1 = c_getstr (arg1);
8614 p2 = c_getstr (arg2);
8616 /* If all arguments are constant, and the value of len is not greater
8617 than the lengths of arg1 and arg2, evaluate at compile-time. */
8618 if (host_integerp (len, 1) && p1 && p2
8619 && compare_tree_int (len, strlen (p1) + 1) <= 0
8620 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8622 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8625 return integer_one_node;
8627 return integer_minus_one_node;
8629 return integer_zero_node;
8632 /* If len parameter is one, return an expression corresponding to
8633 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8634 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8636 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8637 tree cst_uchar_ptr_node
8638 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8641 = fold_convert_loc (loc, integer_type_node,
8642 build1 (INDIRECT_REF, cst_uchar_node,
8643 fold_convert_loc (loc,
8647 = fold_convert_loc (loc, integer_type_node,
8648 build1 (INDIRECT_REF, cst_uchar_node,
8649 fold_convert_loc (loc,
8652 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8658 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8659 Return NULL_TREE if no simplification can be made. */
8662 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8664 const char *p1, *p2;
8666 if (!validate_arg (arg1, POINTER_TYPE)
8667 || !validate_arg (arg2, POINTER_TYPE))
8670 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8671 if (operand_equal_p (arg1, arg2, 0))
8672 return integer_zero_node;
8674 p1 = c_getstr (arg1);
8675 p2 = c_getstr (arg2);
8679 const int i = strcmp (p1, p2);
8681 return integer_minus_one_node;
8683 return integer_one_node;
8685 return integer_zero_node;
8688 /* If the second arg is "", return *(const unsigned char*)arg1. */
8689 if (p2 && *p2 == '\0')
8691 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8692 tree cst_uchar_ptr_node
8693 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8695 return fold_convert_loc (loc, integer_type_node,
8696 build1 (INDIRECT_REF, cst_uchar_node,
8697 fold_convert_loc (loc,
8702 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8703 if (p1 && *p1 == '\0')
8705 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8706 tree cst_uchar_ptr_node
8707 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8710 = fold_convert_loc (loc, integer_type_node,
8711 build1 (INDIRECT_REF, cst_uchar_node,
8712 fold_convert_loc (loc,
8715 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8721 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8722 Return NULL_TREE if no simplification can be made. */
8725 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8727 const char *p1, *p2;
8729 if (!validate_arg (arg1, POINTER_TYPE)
8730 || !validate_arg (arg2, POINTER_TYPE)
8731 || !validate_arg (len, INTEGER_TYPE))
8734 /* If the LEN parameter is zero, return zero. */
8735 if (integer_zerop (len))
8736 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8739 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8740 if (operand_equal_p (arg1, arg2, 0))
8741 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8743 p1 = c_getstr (arg1);
8744 p2 = c_getstr (arg2);
8746 if (host_integerp (len, 1) && p1 && p2)
8748 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8750 return integer_one_node;
8752 return integer_minus_one_node;
8754 return integer_zero_node;
8757 /* If the second arg is "", and the length is greater than zero,
8758 return *(const unsigned char*)arg1. */
8759 if (p2 && *p2 == '\0'
8760 && TREE_CODE (len) == INTEGER_CST
8761 && tree_int_cst_sgn (len) == 1)
8763 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8764 tree cst_uchar_ptr_node
8765 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8767 return fold_convert_loc (loc, integer_type_node,
8768 build1 (INDIRECT_REF, cst_uchar_node,
8769 fold_convert_loc (loc,
8774 /* If the first arg is "", and the length is greater than zero,
8775 return -*(const unsigned char*)arg2. */
8776 if (p1 && *p1 == '\0'
8777 && TREE_CODE (len) == INTEGER_CST
8778 && tree_int_cst_sgn (len) == 1)
8780 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8781 tree cst_uchar_ptr_node
8782 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8784 tree temp = fold_convert_loc (loc, integer_type_node,
8785 build1 (INDIRECT_REF, cst_uchar_node,
8786 fold_convert_loc (loc,
8789 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8792 /* If len parameter is one, return an expression corresponding to
8793 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8794 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8796 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8797 tree cst_uchar_ptr_node
8798 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8800 tree ind1 = fold_convert_loc (loc, integer_type_node,
8801 build1 (INDIRECT_REF, cst_uchar_node,
8802 fold_convert_loc (loc,
8805 tree ind2 = fold_convert_loc (loc, integer_type_node,
8806 build1 (INDIRECT_REF, cst_uchar_node,
8807 fold_convert_loc (loc,
8810 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8816 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8817 ARG. Return NULL_TREE if no simplification can be made. */
8820 fold_builtin_signbit (location_t loc, tree arg, tree type)
8824 if (!validate_arg (arg, REAL_TYPE))
8827 /* If ARG is a compile-time constant, determine the result. */
8828 if (TREE_CODE (arg) == REAL_CST
8829 && !TREE_OVERFLOW (arg))
8833 c = TREE_REAL_CST (arg);
8834 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8835 return fold_convert_loc (loc, type, temp);
8838 /* If ARG is non-negative, the result is always zero. */
8839 if (tree_expr_nonnegative_p (arg))
8840 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8842 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8843 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8844 return fold_build2_loc (loc, LT_EXPR, type, arg,
8845 build_real (TREE_TYPE (arg), dconst0));
8850 /* Fold function call to builtin copysign, copysignf or copysignl with
8851 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8855 fold_builtin_copysign (location_t loc, tree fndecl,
8856 tree arg1, tree arg2, tree type)
8860 if (!validate_arg (arg1, REAL_TYPE)
8861 || !validate_arg (arg2, REAL_TYPE))
8864 /* copysign(X,X) is X. */
8865 if (operand_equal_p (arg1, arg2, 0))
8866 return fold_convert_loc (loc, type, arg1);
8868 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8869 if (TREE_CODE (arg1) == REAL_CST
8870 && TREE_CODE (arg2) == REAL_CST
8871 && !TREE_OVERFLOW (arg1)
8872 && !TREE_OVERFLOW (arg2))
8874 REAL_VALUE_TYPE c1, c2;
8876 c1 = TREE_REAL_CST (arg1);
8877 c2 = TREE_REAL_CST (arg2);
8878 /* c1.sign := c2.sign. */
8879 real_copysign (&c1, &c2);
8880 return build_real (type, c1);
8883 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8884 Remember to evaluate Y for side-effects. */
8885 if (tree_expr_nonnegative_p (arg2))
8886 return omit_one_operand_loc (loc, type,
8887 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8890 /* Strip sign changing operations for the first argument. */
8891 tem = fold_strip_sign_ops (arg1);
8893 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8898 /* Fold a call to builtin isascii with argument ARG. */
8901 fold_builtin_isascii (location_t loc, tree arg)
8903 if (!validate_arg (arg, INTEGER_TYPE))
8907 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8908 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8909 build_int_cst (NULL_TREE,
8910 ~ (unsigned HOST_WIDE_INT) 0x7f));
8911 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8912 arg, integer_zero_node);
8916 /* Fold a call to builtin toascii with argument ARG. */
8919 fold_builtin_toascii (location_t loc, tree arg)
8921 if (!validate_arg (arg, INTEGER_TYPE))
8924 /* Transform toascii(c) -> (c & 0x7f). */
8925 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8926 build_int_cst (NULL_TREE, 0x7f));
8929 /* Fold a call to builtin isdigit with argument ARG. */
8932 fold_builtin_isdigit (location_t loc, tree arg)
8934 if (!validate_arg (arg, INTEGER_TYPE))
8938 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8939 /* According to the C standard, isdigit is unaffected by locale.
8940 However, it definitely is affected by the target character set. */
8941 unsigned HOST_WIDE_INT target_digit0
8942 = lang_hooks.to_target_charset ('0');
8944 if (target_digit0 == 0)
8947 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8948 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8949 build_int_cst (unsigned_type_node, target_digit0));
8950 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8951 build_int_cst (unsigned_type_node, 9));
8955 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8958 fold_builtin_fabs (location_t loc, tree arg, tree type)
8960 if (!validate_arg (arg, REAL_TYPE))
8963 arg = fold_convert_loc (loc, type, arg);
8964 if (TREE_CODE (arg) == REAL_CST)
8965 return fold_abs_const (arg, type);
8966 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8969 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8972 fold_builtin_abs (location_t loc, tree arg, tree type)
8974 if (!validate_arg (arg, INTEGER_TYPE))
8977 arg = fold_convert_loc (loc, type, arg);
8978 if (TREE_CODE (arg) == INTEGER_CST)
8979 return fold_abs_const (arg, type);
8980 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8983 /* Fold a call to builtin fmin or fmax. */
8986 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8987 tree type, bool max)
8989 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8991 /* Calculate the result when the argument is a constant. */
8992 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8997 /* If either argument is NaN, return the other one. Avoid the
8998 transformation if we get (and honor) a signalling NaN. Using
8999 omit_one_operand() ensures we create a non-lvalue. */
9000 if (TREE_CODE (arg0) == REAL_CST
9001 && real_isnan (&TREE_REAL_CST (arg0))
9002 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9003 || ! TREE_REAL_CST (arg0).signalling))
9004 return omit_one_operand_loc (loc, type, arg1, arg0);
9005 if (TREE_CODE (arg1) == REAL_CST
9006 && real_isnan (&TREE_REAL_CST (arg1))
9007 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9008 || ! TREE_REAL_CST (arg1).signalling))
9009 return omit_one_operand_loc (loc, type, arg0, arg1);
9011 /* Transform fmin/fmax(x,x) -> x. */
9012 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9013 return omit_one_operand_loc (loc, type, arg0, arg1);
9015 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9016 functions to return the numeric arg if the other one is NaN.
9017 These tree codes don't honor that, so only transform if
9018 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9019 handled, so we don't have to worry about it either. */
9020 if (flag_finite_math_only)
9021 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9022 fold_convert_loc (loc, type, arg0),
9023 fold_convert_loc (loc, type, arg1));
9028 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9031 fold_builtin_carg (location_t loc, tree arg, tree type)
9033 if (validate_arg (arg, COMPLEX_TYPE)
9034 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9036 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9040 tree new_arg = builtin_save_expr (arg);
9041 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9042 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9043 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9050 /* Fold a call to builtin logb/ilogb. */
9053 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9055 if (! validate_arg (arg, REAL_TYPE))
9060 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9062 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9068 /* If arg is Inf or NaN and we're logb, return it. */
9069 if (TREE_CODE (rettype) == REAL_TYPE)
9070 return fold_convert_loc (loc, rettype, arg);
9071 /* Fall through... */
9073 /* Zero may set errno and/or raise an exception for logb, also
9074 for ilogb we don't know FP_ILOGB0. */
9077 /* For normal numbers, proceed iff radix == 2. In GCC,
9078 normalized significands are in the range [0.5, 1.0). We
9079 want the exponent as if they were [1.0, 2.0) so get the
9080 exponent and subtract 1. */
9081 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9082 return fold_convert_loc (loc, rettype,
9083 build_int_cst (NULL_TREE,
9084 REAL_EXP (value)-1));
9092 /* Fold a call to builtin significand, if radix == 2. */
9095 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9097 if (! validate_arg (arg, REAL_TYPE))
9102 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9104 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9111 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9112 return fold_convert_loc (loc, rettype, arg);
9114 /* For normal numbers, proceed iff radix == 2. */
9115 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9117 REAL_VALUE_TYPE result = *value;
9118 /* In GCC, normalized significands are in the range [0.5,
9119 1.0). We want them to be [1.0, 2.0) so set the
9121 SET_REAL_EXP (&result, 1);
9122 return build_real (rettype, result);
9131 /* Fold a call to builtin frexp, we can assume the base is 2. */
9134 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9136 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9141 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9144 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9146 /* Proceed if a valid pointer type was passed in. */
9147 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9149 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9155 /* For +-0, return (*exp = 0, +-0). */
9156 exp = integer_zero_node;
9161 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9162 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9165 /* Since the frexp function always expects base 2, and in
9166 GCC normalized significands are already in the range
9167 [0.5, 1.0), we have exactly what frexp wants. */
9168 REAL_VALUE_TYPE frac_rvt = *value;
9169 SET_REAL_EXP (&frac_rvt, 0);
9170 frac = build_real (rettype, frac_rvt);
9171 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9178 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9179 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9180 TREE_SIDE_EFFECTS (arg1) = 1;
9181 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9187 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9188 then we can assume the base is two. If it's false, then we have to
9189 check the mode of the TYPE parameter in certain cases. */
9192 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9193 tree type, bool ldexp)
9195 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9200 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9201 if (real_zerop (arg0) || integer_zerop (arg1)
9202 || (TREE_CODE (arg0) == REAL_CST
9203 && !real_isfinite (&TREE_REAL_CST (arg0))))
9204 return omit_one_operand_loc (loc, type, arg0, arg1);
9206 /* If both arguments are constant, then try to evaluate it. */
9207 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9208 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9209 && host_integerp (arg1, 0))
9211 /* Bound the maximum adjustment to twice the range of the
9212 mode's valid exponents. Use abs to ensure the range is
9213 positive as a sanity check. */
9214 const long max_exp_adj = 2 *
9215 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9216 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9218 /* Get the user-requested adjustment. */
9219 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9221 /* The requested adjustment must be inside this range. This
9222 is a preliminary cap to avoid things like overflow, we
9223 may still fail to compute the result for other reasons. */
9224 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9226 REAL_VALUE_TYPE initial_result;
9228 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9230 /* Ensure we didn't overflow. */
9231 if (! real_isinf (&initial_result))
9233 const REAL_VALUE_TYPE trunc_result
9234 = real_value_truncate (TYPE_MODE (type), initial_result);
9236 /* Only proceed if the target mode can hold the
9238 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9239 return build_real (type, trunc_result);
9248 /* Fold a call to builtin modf. */
9251 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9253 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9258 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9261 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9263 /* Proceed if a valid pointer type was passed in. */
9264 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9266 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9267 REAL_VALUE_TYPE trunc, frac;
9273 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9274 trunc = frac = *value;
9277 /* For +-Inf, return (*arg1 = arg0, +-0). */
9279 frac.sign = value->sign;
9283 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9284 real_trunc (&trunc, VOIDmode, value);
9285 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9286 /* If the original number was negative and already
9287 integral, then the fractional part is -0.0. */
9288 if (value->sign && frac.cl == rvc_zero)
9289 frac.sign = value->sign;
9293 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9294 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9295 build_real (rettype, trunc));
9296 TREE_SIDE_EFFECTS (arg1) = 1;
9297 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9298 build_real (rettype, frac));
9304 /* Given a location LOC, an interclass builtin function decl FNDECL
9305 and its single argument ARG, return an folded expression computing
9306 the same, or NULL_TREE if we either couldn't or didn't want to fold
9307 (the latter happen if there's an RTL instruction available). */
9310 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9312 enum machine_mode mode;
9314 if (!validate_arg (arg, REAL_TYPE))
9317 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9320 mode = TYPE_MODE (TREE_TYPE (arg));
9322 /* If there is no optab, try generic code. */
9323 switch (DECL_FUNCTION_CODE (fndecl))
9327 CASE_FLT_FN (BUILT_IN_ISINF):
9329 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9330 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9331 tree const type = TREE_TYPE (arg);
9335 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9336 real_from_string (&r, buf);
9337 result = build_call_expr (isgr_fn, 2,
9338 fold_build1_loc (loc, ABS_EXPR, type, arg),
9339 build_real (type, r));
9342 CASE_FLT_FN (BUILT_IN_FINITE):
9343 case BUILT_IN_ISFINITE:
9345 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9346 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9347 tree const type = TREE_TYPE (arg);
9351 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9352 real_from_string (&r, buf);
9353 result = build_call_expr (isle_fn, 2,
9354 fold_build1_loc (loc, ABS_EXPR, type, arg),
9355 build_real (type, r));
9356 /*result = fold_build2_loc (loc, UNGT_EXPR,
9357 TREE_TYPE (TREE_TYPE (fndecl)),
9358 fold_build1_loc (loc, ABS_EXPR, type, arg),
9359 build_real (type, r));
9360 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9361 TREE_TYPE (TREE_TYPE (fndecl)),
9365 case BUILT_IN_ISNORMAL:
9367 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9368 islessequal(fabs(x),DBL_MAX). */
9369 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9370 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9371 tree const type = TREE_TYPE (arg);
9372 REAL_VALUE_TYPE rmax, rmin;
9375 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9376 real_from_string (&rmax, buf);
9377 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9378 real_from_string (&rmin, buf);
9379 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9380 result = build_call_expr (isle_fn, 2, arg,
9381 build_real (type, rmax));
9382 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9383 build_call_expr (isge_fn, 2, arg,
9384 build_real (type, rmin)));
9394 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9395 ARG is the argument for the call. */
9398 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9400 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9403 if (!validate_arg (arg, REAL_TYPE))
9406 switch (builtin_index)
9408 case BUILT_IN_ISINF:
9409 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9410 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9412 if (TREE_CODE (arg) == REAL_CST)
9414 r = TREE_REAL_CST (arg);
9415 if (real_isinf (&r))
9416 return real_compare (GT_EXPR, &r, &dconst0)
9417 ? integer_one_node : integer_minus_one_node;
9419 return integer_zero_node;
9424 case BUILT_IN_ISINF_SIGN:
9426 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9427 /* In a boolean context, GCC will fold the inner COND_EXPR to
9428 1. So e.g. "if (isinf_sign(x))" would be folded to just
9429 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9430 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9431 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9432 tree tmp = NULL_TREE;
9434 arg = builtin_save_expr (arg);
9436 if (signbit_fn && isinf_fn)
9438 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9439 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9441 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9442 signbit_call, integer_zero_node);
9443 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9444 isinf_call, integer_zero_node);
9446 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9447 integer_minus_one_node, integer_one_node);
9448 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9456 case BUILT_IN_ISFINITE:
9457 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9458 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9459 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9461 if (TREE_CODE (arg) == REAL_CST)
9463 r = TREE_REAL_CST (arg);
9464 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9469 case BUILT_IN_ISNAN:
9470 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9471 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9473 if (TREE_CODE (arg) == REAL_CST)
9475 r = TREE_REAL_CST (arg);
9476 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9479 arg = builtin_save_expr (arg);
9480 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9487 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9488 This builtin will generate code to return the appropriate floating
9489 point classification depending on the value of the floating point
9490 number passed in. The possible return values must be supplied as
9491 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9492 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9493 one floating point argument which is "type generic". */
9496 fold_builtin_fpclassify (location_t loc, tree exp)
9498 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9499 arg, type, res, tmp;
9500 enum machine_mode mode;
9504 /* Verify the required arguments in the original call. */
9505 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9506 INTEGER_TYPE, INTEGER_TYPE,
9507 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9510 fp_nan = CALL_EXPR_ARG (exp, 0);
9511 fp_infinite = CALL_EXPR_ARG (exp, 1);
9512 fp_normal = CALL_EXPR_ARG (exp, 2);
9513 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9514 fp_zero = CALL_EXPR_ARG (exp, 4);
9515 arg = CALL_EXPR_ARG (exp, 5);
9516 type = TREE_TYPE (arg);
9517 mode = TYPE_MODE (type);
9518 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9522 (fabs(x) == Inf ? FP_INFINITE :
9523 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9524 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9526 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9527 build_real (type, dconst0));
9528 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9529 tmp, fp_zero, fp_subnormal);
9531 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9532 real_from_string (&r, buf);
9533 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9534 arg, build_real (type, r));
9535 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9537 if (HONOR_INFINITIES (mode))
9540 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9541 build_real (type, r));
9542 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9546 if (HONOR_NANS (mode))
9548 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9549 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9555 /* Fold a call to an unordered comparison function such as
9556 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9557 being called and ARG0 and ARG1 are the arguments for the call.
9558 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9559 the opposite of the desired result. UNORDERED_CODE is used
9560 for modes that can hold NaNs and ORDERED_CODE is used for
9564 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9565 enum tree_code unordered_code,
9566 enum tree_code ordered_code)
9568 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9569 enum tree_code code;
9571 enum tree_code code0, code1;
9572 tree cmp_type = NULL_TREE;
9574 type0 = TREE_TYPE (arg0);
9575 type1 = TREE_TYPE (arg1);
9577 code0 = TREE_CODE (type0);
9578 code1 = TREE_CODE (type1);
9580 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9581 /* Choose the wider of two real types. */
9582 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9584 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9586 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9589 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9590 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9592 if (unordered_code == UNORDERED_EXPR)
9594 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9595 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9596 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9599 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9601 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9602 fold_build2_loc (loc, code, type, arg0, arg1));
9605 /* Fold a call to built-in function FNDECL with 0 arguments.
9606 IGNORE is true if the result of the function call is ignored. This
9607 function returns NULL_TREE if no simplification was possible. */
9610 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9612 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9613 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9616 CASE_FLT_FN (BUILT_IN_INF):
9617 case BUILT_IN_INFD32:
9618 case BUILT_IN_INFD64:
9619 case BUILT_IN_INFD128:
9620 return fold_builtin_inf (loc, type, true);
9622 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9623 return fold_builtin_inf (loc, type, false);
9625 case BUILT_IN_CLASSIFY_TYPE:
9626 return fold_builtin_classify_type (NULL_TREE);
9634 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9635 IGNORE is true if the result of the function call is ignored. This
9636 function returns NULL_TREE if no simplification was possible. */
9639 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9641 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9642 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9646 case BUILT_IN_CONSTANT_P:
9648 tree val = fold_builtin_constant_p (arg0);
9650 /* Gimplification will pull the CALL_EXPR for the builtin out of
9651 an if condition. When not optimizing, we'll not CSE it back.
9652 To avoid link error types of regressions, return false now. */
9653 if (!val && !optimize)
9654 val = integer_zero_node;
9659 case BUILT_IN_CLASSIFY_TYPE:
9660 return fold_builtin_classify_type (arg0);
9662 case BUILT_IN_STRLEN:
9663 return fold_builtin_strlen (loc, type, arg0);
9665 CASE_FLT_FN (BUILT_IN_FABS):
9666 return fold_builtin_fabs (loc, arg0, type);
9670 case BUILT_IN_LLABS:
9671 case BUILT_IN_IMAXABS:
9672 return fold_builtin_abs (loc, arg0, type);
9674 CASE_FLT_FN (BUILT_IN_CONJ):
9675 if (validate_arg (arg0, COMPLEX_TYPE)
9676 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9677 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9680 CASE_FLT_FN (BUILT_IN_CREAL):
9681 if (validate_arg (arg0, COMPLEX_TYPE)
9682 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9683 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9686 CASE_FLT_FN (BUILT_IN_CIMAG):
9687 if (validate_arg (arg0, COMPLEX_TYPE)
9688 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9689 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9692 CASE_FLT_FN (BUILT_IN_CCOS):
9693 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9695 CASE_FLT_FN (BUILT_IN_CCOSH):
9696 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9698 CASE_FLT_FN (BUILT_IN_CSIN):
9699 if (validate_arg (arg0, COMPLEX_TYPE)
9700 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9701 return do_mpc_arg1 (arg0, type, mpc_sin);
9704 CASE_FLT_FN (BUILT_IN_CSINH):
9705 if (validate_arg (arg0, COMPLEX_TYPE)
9706 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9707 return do_mpc_arg1 (arg0, type, mpc_sinh);
9710 CASE_FLT_FN (BUILT_IN_CTAN):
9711 if (validate_arg (arg0, COMPLEX_TYPE)
9712 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9713 return do_mpc_arg1 (arg0, type, mpc_tan);
9716 CASE_FLT_FN (BUILT_IN_CTANH):
9717 if (validate_arg (arg0, COMPLEX_TYPE)
9718 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9719 return do_mpc_arg1 (arg0, type, mpc_tanh);
9722 CASE_FLT_FN (BUILT_IN_CLOG):
9723 if (validate_arg (arg0, COMPLEX_TYPE)
9724 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9725 return do_mpc_arg1 (arg0, type, mpc_log);
9728 CASE_FLT_FN (BUILT_IN_CSQRT):
9729 if (validate_arg (arg0, COMPLEX_TYPE)
9730 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9731 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9734 CASE_FLT_FN (BUILT_IN_CASIN):
9735 if (validate_arg (arg0, COMPLEX_TYPE)
9736 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9737 return do_mpc_arg1 (arg0, type, mpc_asin);
9740 CASE_FLT_FN (BUILT_IN_CACOS):
9741 if (validate_arg (arg0, COMPLEX_TYPE)
9742 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9743 return do_mpc_arg1 (arg0, type, mpc_acos);
9746 CASE_FLT_FN (BUILT_IN_CATAN):
9747 if (validate_arg (arg0, COMPLEX_TYPE)
9748 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9749 return do_mpc_arg1 (arg0, type, mpc_atan);
9752 CASE_FLT_FN (BUILT_IN_CASINH):
9753 if (validate_arg (arg0, COMPLEX_TYPE)
9754 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9755 return do_mpc_arg1 (arg0, type, mpc_asinh);
9758 CASE_FLT_FN (BUILT_IN_CACOSH):
9759 if (validate_arg (arg0, COMPLEX_TYPE)
9760 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9761 return do_mpc_arg1 (arg0, type, mpc_acosh);
9764 CASE_FLT_FN (BUILT_IN_CATANH):
9765 if (validate_arg (arg0, COMPLEX_TYPE)
9766 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9767 return do_mpc_arg1 (arg0, type, mpc_atanh);
9770 CASE_FLT_FN (BUILT_IN_CABS):
9771 return fold_builtin_cabs (loc, arg0, type, fndecl);
9773 CASE_FLT_FN (BUILT_IN_CARG):
9774 return fold_builtin_carg (loc, arg0, type);
9776 CASE_FLT_FN (BUILT_IN_SQRT):
9777 return fold_builtin_sqrt (loc, arg0, type);
9779 CASE_FLT_FN (BUILT_IN_CBRT):
9780 return fold_builtin_cbrt (loc, arg0, type);
9782 CASE_FLT_FN (BUILT_IN_ASIN):
9783 if (validate_arg (arg0, REAL_TYPE))
9784 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9785 &dconstm1, &dconst1, true);
9788 CASE_FLT_FN (BUILT_IN_ACOS):
9789 if (validate_arg (arg0, REAL_TYPE))
9790 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9791 &dconstm1, &dconst1, true);
9794 CASE_FLT_FN (BUILT_IN_ATAN):
9795 if (validate_arg (arg0, REAL_TYPE))
9796 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9799 CASE_FLT_FN (BUILT_IN_ASINH):
9800 if (validate_arg (arg0, REAL_TYPE))
9801 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9804 CASE_FLT_FN (BUILT_IN_ACOSH):
9805 if (validate_arg (arg0, REAL_TYPE))
9806 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9807 &dconst1, NULL, true);
9810 CASE_FLT_FN (BUILT_IN_ATANH):
9811 if (validate_arg (arg0, REAL_TYPE))
9812 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9813 &dconstm1, &dconst1, false);
9816 CASE_FLT_FN (BUILT_IN_SIN):
9817 if (validate_arg (arg0, REAL_TYPE))
9818 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9821 CASE_FLT_FN (BUILT_IN_COS):
9822 return fold_builtin_cos (loc, arg0, type, fndecl);
9824 CASE_FLT_FN (BUILT_IN_TAN):
9825 return fold_builtin_tan (arg0, type);
9827 CASE_FLT_FN (BUILT_IN_CEXP):
9828 return fold_builtin_cexp (loc, arg0, type);
9830 CASE_FLT_FN (BUILT_IN_CEXPI):
9831 if (validate_arg (arg0, REAL_TYPE))
9832 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9835 CASE_FLT_FN (BUILT_IN_SINH):
9836 if (validate_arg (arg0, REAL_TYPE))
9837 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9840 CASE_FLT_FN (BUILT_IN_COSH):
9841 return fold_builtin_cosh (loc, arg0, type, fndecl);
9843 CASE_FLT_FN (BUILT_IN_TANH):
9844 if (validate_arg (arg0, REAL_TYPE))
9845 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9848 CASE_FLT_FN (BUILT_IN_ERF):
9849 if (validate_arg (arg0, REAL_TYPE))
9850 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9853 CASE_FLT_FN (BUILT_IN_ERFC):
9854 if (validate_arg (arg0, REAL_TYPE))
9855 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9858 CASE_FLT_FN (BUILT_IN_TGAMMA):
9859 if (validate_arg (arg0, REAL_TYPE))
9860 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9863 CASE_FLT_FN (BUILT_IN_EXP):
9864 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9866 CASE_FLT_FN (BUILT_IN_EXP2):
9867 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9869 CASE_FLT_FN (BUILT_IN_EXP10):
9870 CASE_FLT_FN (BUILT_IN_POW10):
9871 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9873 CASE_FLT_FN (BUILT_IN_EXPM1):
9874 if (validate_arg (arg0, REAL_TYPE))
9875 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9878 CASE_FLT_FN (BUILT_IN_LOG):
9879 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9881 CASE_FLT_FN (BUILT_IN_LOG2):
9882 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9884 CASE_FLT_FN (BUILT_IN_LOG10):
9885 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9887 CASE_FLT_FN (BUILT_IN_LOG1P):
9888 if (validate_arg (arg0, REAL_TYPE))
9889 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9890 &dconstm1, NULL, false);
9893 CASE_FLT_FN (BUILT_IN_J0):
9894 if (validate_arg (arg0, REAL_TYPE))
9895 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9899 CASE_FLT_FN (BUILT_IN_J1):
9900 if (validate_arg (arg0, REAL_TYPE))
9901 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9905 CASE_FLT_FN (BUILT_IN_Y0):
9906 if (validate_arg (arg0, REAL_TYPE))
9907 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9908 &dconst0, NULL, false);
9911 CASE_FLT_FN (BUILT_IN_Y1):
9912 if (validate_arg (arg0, REAL_TYPE))
9913 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9914 &dconst0, NULL, false);
9917 CASE_FLT_FN (BUILT_IN_NAN):
9918 case BUILT_IN_NAND32:
9919 case BUILT_IN_NAND64:
9920 case BUILT_IN_NAND128:
9921 return fold_builtin_nan (arg0, type, true);
9923 CASE_FLT_FN (BUILT_IN_NANS):
9924 return fold_builtin_nan (arg0, type, false);
9926 CASE_FLT_FN (BUILT_IN_FLOOR):
9927 return fold_builtin_floor (loc, fndecl, arg0);
9929 CASE_FLT_FN (BUILT_IN_CEIL):
9930 return fold_builtin_ceil (loc, fndecl, arg0);
9932 CASE_FLT_FN (BUILT_IN_TRUNC):
9933 return fold_builtin_trunc (loc, fndecl, arg0);
9935 CASE_FLT_FN (BUILT_IN_ROUND):
9936 return fold_builtin_round (loc, fndecl, arg0);
9938 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9939 CASE_FLT_FN (BUILT_IN_RINT):
9940 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9942 CASE_FLT_FN (BUILT_IN_LCEIL):
9943 CASE_FLT_FN (BUILT_IN_LLCEIL):
9944 CASE_FLT_FN (BUILT_IN_LFLOOR):
9945 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9946 CASE_FLT_FN (BUILT_IN_LROUND):
9947 CASE_FLT_FN (BUILT_IN_LLROUND):
9948 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9950 CASE_FLT_FN (BUILT_IN_LRINT):
9951 CASE_FLT_FN (BUILT_IN_LLRINT):
9952 return fold_fixed_mathfn (loc, fndecl, arg0);
9954 case BUILT_IN_BSWAP32:
9955 case BUILT_IN_BSWAP64:
9956 return fold_builtin_bswap (fndecl, arg0);
9958 CASE_INT_FN (BUILT_IN_FFS):
9959 CASE_INT_FN (BUILT_IN_CLZ):
9960 CASE_INT_FN (BUILT_IN_CTZ):
9961 CASE_INT_FN (BUILT_IN_POPCOUNT):
9962 CASE_INT_FN (BUILT_IN_PARITY):
9963 return fold_builtin_bitop (fndecl, arg0);
9965 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9966 return fold_builtin_signbit (loc, arg0, type);
9968 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9969 return fold_builtin_significand (loc, arg0, type);
9971 CASE_FLT_FN (BUILT_IN_ILOGB):
9972 CASE_FLT_FN (BUILT_IN_LOGB):
9973 return fold_builtin_logb (loc, arg0, type);
9975 case BUILT_IN_ISASCII:
9976 return fold_builtin_isascii (loc, arg0);
9978 case BUILT_IN_TOASCII:
9979 return fold_builtin_toascii (loc, arg0);
9981 case BUILT_IN_ISDIGIT:
9982 return fold_builtin_isdigit (loc, arg0);
9984 CASE_FLT_FN (BUILT_IN_FINITE):
9985 case BUILT_IN_FINITED32:
9986 case BUILT_IN_FINITED64:
9987 case BUILT_IN_FINITED128:
9988 case BUILT_IN_ISFINITE:
9990 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9993 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9996 CASE_FLT_FN (BUILT_IN_ISINF):
9997 case BUILT_IN_ISINFD32:
9998 case BUILT_IN_ISINFD64:
9999 case BUILT_IN_ISINFD128:
10001 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10004 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10007 case BUILT_IN_ISNORMAL:
10008 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10010 case BUILT_IN_ISINF_SIGN:
10011 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10013 CASE_FLT_FN (BUILT_IN_ISNAN):
10014 case BUILT_IN_ISNAND32:
10015 case BUILT_IN_ISNAND64:
10016 case BUILT_IN_ISNAND128:
10017 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10019 case BUILT_IN_PRINTF:
10020 case BUILT_IN_PRINTF_UNLOCKED:
10021 case BUILT_IN_VPRINTF:
10022 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10032 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10033 IGNORE is true if the result of the function call is ignored. This
10034 function returns NULL_TREE if no simplification was possible. */
10037 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10039 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10040 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10044 CASE_FLT_FN (BUILT_IN_JN):
10045 if (validate_arg (arg0, INTEGER_TYPE)
10046 && validate_arg (arg1, REAL_TYPE))
10047 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10050 CASE_FLT_FN (BUILT_IN_YN):
10051 if (validate_arg (arg0, INTEGER_TYPE)
10052 && validate_arg (arg1, REAL_TYPE))
10053 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10057 CASE_FLT_FN (BUILT_IN_DREM):
10058 CASE_FLT_FN (BUILT_IN_REMAINDER):
10059 if (validate_arg (arg0, REAL_TYPE)
10060 && validate_arg(arg1, REAL_TYPE))
10061 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10064 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10065 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10066 if (validate_arg (arg0, REAL_TYPE)
10067 && validate_arg(arg1, POINTER_TYPE))
10068 return do_mpfr_lgamma_r (arg0, arg1, type);
10071 CASE_FLT_FN (BUILT_IN_ATAN2):
10072 if (validate_arg (arg0, REAL_TYPE)
10073 && validate_arg(arg1, REAL_TYPE))
10074 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10077 CASE_FLT_FN (BUILT_IN_FDIM):
10078 if (validate_arg (arg0, REAL_TYPE)
10079 && validate_arg(arg1, REAL_TYPE))
10080 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10083 CASE_FLT_FN (BUILT_IN_HYPOT):
10084 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10086 CASE_FLT_FN (BUILT_IN_CPOW):
10087 if (validate_arg (arg0, COMPLEX_TYPE)
10088 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10089 && validate_arg (arg1, COMPLEX_TYPE)
10090 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10091 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10094 CASE_FLT_FN (BUILT_IN_LDEXP):
10095 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10096 CASE_FLT_FN (BUILT_IN_SCALBN):
10097 CASE_FLT_FN (BUILT_IN_SCALBLN):
10098 return fold_builtin_load_exponent (loc, arg0, arg1,
10099 type, /*ldexp=*/false);
10101 CASE_FLT_FN (BUILT_IN_FREXP):
10102 return fold_builtin_frexp (loc, arg0, arg1, type);
10104 CASE_FLT_FN (BUILT_IN_MODF):
10105 return fold_builtin_modf (loc, arg0, arg1, type);
10107 case BUILT_IN_BZERO:
10108 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10110 case BUILT_IN_FPUTS:
10111 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10113 case BUILT_IN_FPUTS_UNLOCKED:
10114 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10116 case BUILT_IN_STRSTR:
10117 return fold_builtin_strstr (loc, arg0, arg1, type);
10119 case BUILT_IN_STRCAT:
10120 return fold_builtin_strcat (loc, arg0, arg1);
10122 case BUILT_IN_STRSPN:
10123 return fold_builtin_strspn (loc, arg0, arg1);
10125 case BUILT_IN_STRCSPN:
10126 return fold_builtin_strcspn (loc, arg0, arg1);
10128 case BUILT_IN_STRCHR:
10129 case BUILT_IN_INDEX:
10130 return fold_builtin_strchr (loc, arg0, arg1, type);
10132 case BUILT_IN_STRRCHR:
10133 case BUILT_IN_RINDEX:
10134 return fold_builtin_strrchr (loc, arg0, arg1, type);
10136 case BUILT_IN_STRCPY:
10137 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10139 case BUILT_IN_STPCPY:
10142 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10146 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10149 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10152 case BUILT_IN_STRCMP:
10153 return fold_builtin_strcmp (loc, arg0, arg1);
10155 case BUILT_IN_STRPBRK:
10156 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10158 case BUILT_IN_EXPECT:
10159 return fold_builtin_expect (loc, arg0, arg1);
10161 CASE_FLT_FN (BUILT_IN_POW):
10162 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10164 CASE_FLT_FN (BUILT_IN_POWI):
10165 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10167 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10168 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10170 CASE_FLT_FN (BUILT_IN_FMIN):
10171 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10173 CASE_FLT_FN (BUILT_IN_FMAX):
10174 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10176 case BUILT_IN_ISGREATER:
10177 return fold_builtin_unordered_cmp (loc, fndecl,
10178 arg0, arg1, UNLE_EXPR, LE_EXPR);
10179 case BUILT_IN_ISGREATEREQUAL:
10180 return fold_builtin_unordered_cmp (loc, fndecl,
10181 arg0, arg1, UNLT_EXPR, LT_EXPR);
10182 case BUILT_IN_ISLESS:
10183 return fold_builtin_unordered_cmp (loc, fndecl,
10184 arg0, arg1, UNGE_EXPR, GE_EXPR);
10185 case BUILT_IN_ISLESSEQUAL:
10186 return fold_builtin_unordered_cmp (loc, fndecl,
10187 arg0, arg1, UNGT_EXPR, GT_EXPR);
10188 case BUILT_IN_ISLESSGREATER:
10189 return fold_builtin_unordered_cmp (loc, fndecl,
10190 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10191 case BUILT_IN_ISUNORDERED:
10192 return fold_builtin_unordered_cmp (loc, fndecl,
10193 arg0, arg1, UNORDERED_EXPR,
10196 /* We do the folding for va_start in the expander. */
10197 case BUILT_IN_VA_START:
10200 case BUILT_IN_SPRINTF:
10201 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10203 case BUILT_IN_OBJECT_SIZE:
10204 return fold_builtin_object_size (arg0, arg1);
10206 case BUILT_IN_PRINTF:
10207 case BUILT_IN_PRINTF_UNLOCKED:
10208 case BUILT_IN_VPRINTF:
10209 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10211 case BUILT_IN_PRINTF_CHK:
10212 case BUILT_IN_VPRINTF_CHK:
10213 if (!validate_arg (arg0, INTEGER_TYPE)
10214 || TREE_SIDE_EFFECTS (arg0))
10217 return fold_builtin_printf (loc, fndecl,
10218 arg1, NULL_TREE, ignore, fcode);
10221 case BUILT_IN_FPRINTF:
10222 case BUILT_IN_FPRINTF_UNLOCKED:
10223 case BUILT_IN_VFPRINTF:
10224 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10233 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10234 and ARG2. IGNORE is true if the result of the function call is ignored.
10235 This function returns NULL_TREE if no simplification was possible. */
10238 fold_builtin_3 (location_t loc, tree fndecl,
10239 tree arg0, tree arg1, tree arg2, bool ignore)
10241 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10242 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10246 CASE_FLT_FN (BUILT_IN_SINCOS):
10247 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10249 CASE_FLT_FN (BUILT_IN_FMA):
10250 if (validate_arg (arg0, REAL_TYPE)
10251 && validate_arg(arg1, REAL_TYPE)
10252 && validate_arg(arg2, REAL_TYPE))
10253 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10256 CASE_FLT_FN (BUILT_IN_REMQUO):
10257 if (validate_arg (arg0, REAL_TYPE)
10258 && validate_arg(arg1, REAL_TYPE)
10259 && validate_arg(arg2, POINTER_TYPE))
10260 return do_mpfr_remquo (arg0, arg1, arg2);
10263 case BUILT_IN_MEMSET:
10264 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10266 case BUILT_IN_BCOPY:
10267 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10268 void_type_node, true, /*endp=*/3);
10270 case BUILT_IN_MEMCPY:
10271 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10272 type, ignore, /*endp=*/0);
10274 case BUILT_IN_MEMPCPY:
10275 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10276 type, ignore, /*endp=*/1);
10278 case BUILT_IN_MEMMOVE:
10279 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10280 type, ignore, /*endp=*/3);
10282 case BUILT_IN_STRNCAT:
10283 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10285 case BUILT_IN_STRNCPY:
10286 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10288 case BUILT_IN_STRNCMP:
10289 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10291 case BUILT_IN_MEMCHR:
10292 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10294 case BUILT_IN_BCMP:
10295 case BUILT_IN_MEMCMP:
10296 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10298 case BUILT_IN_SPRINTF:
10299 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10301 case BUILT_IN_STRCPY_CHK:
10302 case BUILT_IN_STPCPY_CHK:
10303 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10306 case BUILT_IN_STRCAT_CHK:
10307 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10309 case BUILT_IN_PRINTF_CHK:
10310 case BUILT_IN_VPRINTF_CHK:
10311 if (!validate_arg (arg0, INTEGER_TYPE)
10312 || TREE_SIDE_EFFECTS (arg0))
10315 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10318 case BUILT_IN_FPRINTF:
10319 case BUILT_IN_FPRINTF_UNLOCKED:
10320 case BUILT_IN_VFPRINTF:
10321 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10324 case BUILT_IN_FPRINTF_CHK:
10325 case BUILT_IN_VFPRINTF_CHK:
10326 if (!validate_arg (arg1, INTEGER_TYPE)
10327 || TREE_SIDE_EFFECTS (arg1))
10330 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10339 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10340 ARG2, and ARG3. IGNORE is true if the result of the function call is
10341 ignored. This function returns NULL_TREE if no simplification was
10345 fold_builtin_4 (location_t loc, tree fndecl,
10346 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10348 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10352 case BUILT_IN_MEMCPY_CHK:
10353 case BUILT_IN_MEMPCPY_CHK:
10354 case BUILT_IN_MEMMOVE_CHK:
10355 case BUILT_IN_MEMSET_CHK:
10356 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10358 DECL_FUNCTION_CODE (fndecl));
10360 case BUILT_IN_STRNCPY_CHK:
10361 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10363 case BUILT_IN_STRNCAT_CHK:
10364 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10366 case BUILT_IN_FPRINTF_CHK:
10367 case BUILT_IN_VFPRINTF_CHK:
10368 if (!validate_arg (arg1, INTEGER_TYPE)
10369 || TREE_SIDE_EFFECTS (arg1))
10372 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10382 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10383 arguments, where NARGS <= 4. IGNORE is true if the result of the
10384 function call is ignored. This function returns NULL_TREE if no
10385 simplification was possible. Note that this only folds builtins with
10386 fixed argument patterns. Foldings that do varargs-to-varargs
10387 transformations, or that match calls with more than 4 arguments,
10388 need to be handled with fold_builtin_varargs instead. */
10390 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10393 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10395 tree ret = NULL_TREE;
10400 ret = fold_builtin_0 (loc, fndecl, ignore);
10403 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10406 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10409 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10412 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10420 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10421 SET_EXPR_LOCATION (ret, loc);
10422 TREE_NO_WARNING (ret) = 1;
10428 /* Builtins with folding operations that operate on "..." arguments
10429 need special handling; we need to store the arguments in a convenient
10430 data structure before attempting any folding. Fortunately there are
10431 only a few builtins that fall into this category. FNDECL is the
10432 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10433 result of the function call is ignored. */
10436 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10437 bool ignore ATTRIBUTE_UNUSED)
10439 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10440 tree ret = NULL_TREE;
10444 case BUILT_IN_SPRINTF_CHK:
10445 case BUILT_IN_VSPRINTF_CHK:
10446 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10449 case BUILT_IN_SNPRINTF_CHK:
10450 case BUILT_IN_VSNPRINTF_CHK:
10451 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10454 case BUILT_IN_FPCLASSIFY:
10455 ret = fold_builtin_fpclassify (loc, exp);
10463 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10464 SET_EXPR_LOCATION (ret, loc);
10465 TREE_NO_WARNING (ret) = 1;
10471 /* Return true if FNDECL shouldn't be folded right now.
10472 If a built-in function has an inline attribute always_inline
10473 wrapper, defer folding it after always_inline functions have
10474 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10475 might not be performed. */
10478 avoid_folding_inline_builtin (tree fndecl)
10480 return (DECL_DECLARED_INLINE_P (fndecl)
10481 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10483 && !cfun->always_inline_functions_inlined
10484 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10487 /* A wrapper function for builtin folding that prevents warnings for
10488 "statement without effect" and the like, caused by removing the
10489 call node earlier than the warning is generated. */
10492 fold_call_expr (location_t loc, tree exp, bool ignore)
10494 tree ret = NULL_TREE;
10495 tree fndecl = get_callee_fndecl (exp);
10497 && TREE_CODE (fndecl) == FUNCTION_DECL
10498 && DECL_BUILT_IN (fndecl)
10499 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10500 yet. Defer folding until we see all the arguments
10501 (after inlining). */
10502 && !CALL_EXPR_VA_ARG_PACK (exp))
10504 int nargs = call_expr_nargs (exp);
10506 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10507 instead last argument is __builtin_va_arg_pack (). Defer folding
10508 even in that case, until arguments are finalized. */
10509 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10511 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10513 && TREE_CODE (fndecl2) == FUNCTION_DECL
10514 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10515 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10519 if (avoid_folding_inline_builtin (fndecl))
10522 /* FIXME: Don't use a list in this interface. */
10523 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10524 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10527 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10529 tree *args = CALL_EXPR_ARGP (exp);
10530 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10533 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10541 /* Conveniently construct a function call expression. FNDECL names the
10542 function to be called and ARGLIST is a TREE_LIST of arguments. */
10545 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10547 tree fntype = TREE_TYPE (fndecl);
10548 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10549 int n = list_length (arglist);
10550 tree *argarray = (tree *) alloca (n * sizeof (tree));
10553 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10554 argarray[i] = TREE_VALUE (arglist);
10555 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10558 /* Conveniently construct a function call expression. FNDECL names the
10559 function to be called, N is the number of arguments, and the "..."
10560 parameters are the argument expressions. */
10563 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10566 tree fntype = TREE_TYPE (fndecl);
10567 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10568 tree *argarray = (tree *) alloca (n * sizeof (tree));
10572 for (i = 0; i < n; i++)
10573 argarray[i] = va_arg (ap, tree);
10575 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10578 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10579 N arguments are passed in the array ARGARRAY. */
10582 fold_builtin_call_array (location_t loc, tree type,
10587 tree ret = NULL_TREE;
10591 if (TREE_CODE (fn) == ADDR_EXPR)
10593 tree fndecl = TREE_OPERAND (fn, 0);
10594 if (TREE_CODE (fndecl) == FUNCTION_DECL
10595 && DECL_BUILT_IN (fndecl))
10597 /* If last argument is __builtin_va_arg_pack (), arguments to this
10598 function are not finalized yet. Defer folding until they are. */
10599 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10601 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10603 && TREE_CODE (fndecl2) == FUNCTION_DECL
10604 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10605 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10606 return build_call_array_loc (loc, type, fn, n, argarray);
10608 if (avoid_folding_inline_builtin (fndecl))
10609 return build_call_array_loc (loc, type, fn, n, argarray);
10610 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10612 tree arglist = NULL_TREE;
10613 for (i = n - 1; i >= 0; i--)
10614 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10615 ret = targetm.fold_builtin (fndecl, arglist, false);
10618 return build_call_array_loc (loc, type, fn, n, argarray);
10620 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10622 /* First try the transformations that don't require consing up
10624 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10629 /* If we got this far, we need to build an exp. */
10630 exp = build_call_array_loc (loc, type, fn, n, argarray);
10631 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10632 return ret ? ret : exp;
10636 return build_call_array_loc (loc, type, fn, n, argarray);
10639 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10640 along with N new arguments specified as the "..." parameters. SKIP
10641 is the number of arguments in EXP to be omitted. This function is used
10642 to do varargs-to-varargs transformations. */
10645 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10647 int oldnargs = call_expr_nargs (exp);
10648 int nargs = oldnargs - skip + n;
10649 tree fntype = TREE_TYPE (fndecl);
10650 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10658 buffer = XALLOCAVEC (tree, nargs);
10660 for (i = 0; i < n; i++)
10661 buffer[i] = va_arg (ap, tree);
10663 for (j = skip; j < oldnargs; j++, i++)
10664 buffer[i] = CALL_EXPR_ARG (exp, j);
10667 buffer = CALL_EXPR_ARGP (exp) + skip;
10669 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
10672 /* Validate a single argument ARG against a tree code CODE representing
10676 validate_arg (const_tree arg, enum tree_code code)
10680 else if (code == POINTER_TYPE)
10681 return POINTER_TYPE_P (TREE_TYPE (arg));
10682 else if (code == INTEGER_TYPE)
10683 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10684 return code == TREE_CODE (TREE_TYPE (arg));
10687 /* This function validates the types of a function call argument list
10688 against a specified list of tree_codes. If the last specifier is a 0,
10689 that represents an ellipses, otherwise the last specifier must be a
10692 This is the GIMPLE version of validate_arglist. Eventually we want to
10693 completely convert builtins.c to work from GIMPLEs and the tree based
10694 validate_arglist will then be removed. */
10697 validate_gimple_arglist (const_gimple call, ...)
10699 enum tree_code code;
10705 va_start (ap, call);
10710 code = (enum tree_code) va_arg (ap, int);
10714 /* This signifies an ellipses, any further arguments are all ok. */
10718 /* This signifies an endlink, if no arguments remain, return
10719 true, otherwise return false. */
10720 res = (i == gimple_call_num_args (call));
10723 /* If no parameters remain or the parameter's code does not
10724 match the specified code, return false. Otherwise continue
10725 checking any remaining arguments. */
10726 arg = gimple_call_arg (call, i++);
10727 if (!validate_arg (arg, code))
10734 /* We need gotos here since we can only have one VA_CLOSE in a
10742 /* This function validates the types of a function call argument list
10743 against a specified list of tree_codes. If the last specifier is a 0,
10744 that represents an ellipses, otherwise the last specifier must be a
10748 validate_arglist (const_tree callexpr, ...)
10750 enum tree_code code;
10753 const_call_expr_arg_iterator iter;
10756 va_start (ap, callexpr);
10757 init_const_call_expr_arg_iterator (callexpr, &iter);
10761 code = (enum tree_code) va_arg (ap, int);
10765 /* This signifies an ellipses, any further arguments are all ok. */
10769 /* This signifies an endlink, if no arguments remain, return
10770 true, otherwise return false. */
10771 res = !more_const_call_expr_args_p (&iter);
10774 /* If no parameters remain or the parameter's code does not
10775 match the specified code, return false. Otherwise continue
10776 checking any remaining arguments. */
10777 arg = next_const_call_expr_arg (&iter);
10778 if (!validate_arg (arg, code))
10785 /* We need gotos here since we can only have one VA_CLOSE in a
10793 /* Default target-specific builtin expander that does nothing. */
10796 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10797 rtx target ATTRIBUTE_UNUSED,
10798 rtx subtarget ATTRIBUTE_UNUSED,
10799 enum machine_mode mode ATTRIBUTE_UNUSED,
10800 int ignore ATTRIBUTE_UNUSED)
10805 /* Returns true is EXP represents data that would potentially reside
10806 in a readonly section. */
10809 readonly_data_expr (tree exp)
10813 if (TREE_CODE (exp) != ADDR_EXPR)
10816 exp = get_base_address (TREE_OPERAND (exp, 0));
10820 /* Make sure we call decl_readonly_section only for trees it
10821 can handle (since it returns true for everything it doesn't
10823 if (TREE_CODE (exp) == STRING_CST
10824 || TREE_CODE (exp) == CONSTRUCTOR
10825 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10826 return decl_readonly_section (exp, 0);
10831 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10832 to the call, and TYPE is its return type.
10834 Return NULL_TREE if no simplification was possible, otherwise return the
10835 simplified form of the call as a tree.
10837 The simplified form may be a constant or other expression which
10838 computes the same value, but in a more efficient manner (including
10839 calls to other builtin functions).
10841 The call may contain arguments which need to be evaluated, but
10842 which are not useful to determine the result of the call. In
10843 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10844 COMPOUND_EXPR will be an argument which must be evaluated.
10845 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10846 COMPOUND_EXPR in the chain will contain the tree for the simplified
10847 form of the builtin function call. */
10850 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10852 if (!validate_arg (s1, POINTER_TYPE)
10853 || !validate_arg (s2, POINTER_TYPE))
10858 const char *p1, *p2;
10860 p2 = c_getstr (s2);
10864 p1 = c_getstr (s1);
10867 const char *r = strstr (p1, p2);
10871 return build_int_cst (TREE_TYPE (s1), 0);
10873 /* Return an offset into the constant string argument. */
10874 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10875 s1, size_int (r - p1));
10876 return fold_convert_loc (loc, type, tem);
10879 /* The argument is const char *, and the result is char *, so we need
10880 a type conversion here to avoid a warning. */
10882 return fold_convert_loc (loc, type, s1);
10887 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10891 /* New argument list transforming strstr(s1, s2) to
10892 strchr(s1, s2[0]). */
10893 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10897 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10898 the call, and TYPE is its return type.
10900 Return NULL_TREE if no simplification was possible, otherwise return the
10901 simplified form of the call as a tree.
10903 The simplified form may be a constant or other expression which
10904 computes the same value, but in a more efficient manner (including
10905 calls to other builtin functions).
10907 The call may contain arguments which need to be evaluated, but
10908 which are not useful to determine the result of the call. In
10909 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10910 COMPOUND_EXPR will be an argument which must be evaluated.
10911 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10912 COMPOUND_EXPR in the chain will contain the tree for the simplified
10913 form of the builtin function call. */
10916 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10918 if (!validate_arg (s1, POINTER_TYPE)
10919 || !validate_arg (s2, INTEGER_TYPE))
10925 if (TREE_CODE (s2) != INTEGER_CST)
10928 p1 = c_getstr (s1);
10935 if (target_char_cast (s2, &c))
10938 r = strchr (p1, c);
10941 return build_int_cst (TREE_TYPE (s1), 0);
10943 /* Return an offset into the constant string argument. */
10944 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10945 s1, size_int (r - p1));
10946 return fold_convert_loc (loc, type, tem);
10952 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10953 the call, and TYPE is its return type.
10955 Return NULL_TREE if no simplification was possible, otherwise return the
10956 simplified form of the call as a tree.
10958 The simplified form may be a constant or other expression which
10959 computes the same value, but in a more efficient manner (including
10960 calls to other builtin functions).
10962 The call may contain arguments which need to be evaluated, but
10963 which are not useful to determine the result of the call. In
10964 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10965 COMPOUND_EXPR will be an argument which must be evaluated.
10966 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10967 COMPOUND_EXPR in the chain will contain the tree for the simplified
10968 form of the builtin function call. */
10971 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10973 if (!validate_arg (s1, POINTER_TYPE)
10974 || !validate_arg (s2, INTEGER_TYPE))
10981 if (TREE_CODE (s2) != INTEGER_CST)
10984 p1 = c_getstr (s1);
10991 if (target_char_cast (s2, &c))
10994 r = strrchr (p1, c);
10997 return build_int_cst (TREE_TYPE (s1), 0);
10999 /* Return an offset into the constant string argument. */
11000 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11001 s1, size_int (r - p1));
11002 return fold_convert_loc (loc, type, tem);
11005 if (! integer_zerop (s2))
11008 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11012 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11013 return build_call_expr_loc (loc, fn, 2, s1, s2);
11017 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11018 to the call, and TYPE is its return type.
11020 Return NULL_TREE if no simplification was possible, otherwise return the
11021 simplified form of the call as a tree.
11023 The simplified form may be a constant or other expression which
11024 computes the same value, but in a more efficient manner (including
11025 calls to other builtin functions).
11027 The call may contain arguments which need to be evaluated, but
11028 which are not useful to determine the result of the call. In
11029 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11030 COMPOUND_EXPR will be an argument which must be evaluated.
11031 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11032 COMPOUND_EXPR in the chain will contain the tree for the simplified
11033 form of the builtin function call. */
11036 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11038 if (!validate_arg (s1, POINTER_TYPE)
11039 || !validate_arg (s2, POINTER_TYPE))
11044 const char *p1, *p2;
11046 p2 = c_getstr (s2);
11050 p1 = c_getstr (s1);
11053 const char *r = strpbrk (p1, p2);
11057 return build_int_cst (TREE_TYPE (s1), 0);
11059 /* Return an offset into the constant string argument. */
11060 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11061 s1, size_int (r - p1));
11062 return fold_convert_loc (loc, type, tem);
11066 /* strpbrk(x, "") == NULL.
11067 Evaluate and ignore s1 in case it had side-effects. */
11068 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11071 return NULL_TREE; /* Really call strpbrk. */
11073 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11077 /* New argument list transforming strpbrk(s1, s2) to
11078 strchr(s1, s2[0]). */
11079 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11083 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11086 Return NULL_TREE if no simplification was possible, otherwise return the
11087 simplified form of the call as a tree.
11089 The simplified form may be a constant or other expression which
11090 computes the same value, but in a more efficient manner (including
11091 calls to other builtin functions).
11093 The call may contain arguments which need to be evaluated, but
11094 which are not useful to determine the result of the call. In
11095 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11096 COMPOUND_EXPR will be an argument which must be evaluated.
11097 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11098 COMPOUND_EXPR in the chain will contain the tree for the simplified
11099 form of the builtin function call. */
11102 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11104 if (!validate_arg (dst, POINTER_TYPE)
11105 || !validate_arg (src, POINTER_TYPE))
11109 const char *p = c_getstr (src);
11111 /* If the string length is zero, return the dst parameter. */
11112 if (p && *p == '\0')
11115 if (optimize_insn_for_speed_p ())
11117 /* See if we can store by pieces into (dst + strlen(dst)). */
11119 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11120 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11122 if (!strlen_fn || !strcpy_fn)
11125 /* If we don't have a movstr we don't want to emit an strcpy
11126 call. We have to do that if the length of the source string
11127 isn't computable (in that case we can use memcpy probably
11128 later expanding to a sequence of mov instructions). If we
11129 have movstr instructions we can emit strcpy calls. */
11132 tree len = c_strlen (src, 1);
11133 if (! len || TREE_SIDE_EFFECTS (len))
11137 /* Stabilize the argument list. */
11138 dst = builtin_save_expr (dst);
11140 /* Create strlen (dst). */
11141 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11142 /* Create (dst p+ strlen (dst)). */
11144 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11145 TREE_TYPE (dst), dst, newdst);
11146 newdst = builtin_save_expr (newdst);
11148 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11149 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11155 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11156 arguments to the call.
11158 Return NULL_TREE if no simplification was possible, otherwise return the
11159 simplified form of the call as a tree.
11161 The simplified form may be a constant or other expression which
11162 computes the same value, but in a more efficient manner (including
11163 calls to other builtin functions).
11165 The call may contain arguments which need to be evaluated, but
11166 which are not useful to determine the result of the call. In
11167 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11168 COMPOUND_EXPR will be an argument which must be evaluated.
11169 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11170 COMPOUND_EXPR in the chain will contain the tree for the simplified
11171 form of the builtin function call. */
11174 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11176 if (!validate_arg (dst, POINTER_TYPE)
11177 || !validate_arg (src, POINTER_TYPE)
11178 || !validate_arg (len, INTEGER_TYPE))
11182 const char *p = c_getstr (src);
11184 /* If the requested length is zero, or the src parameter string
11185 length is zero, return the dst parameter. */
11186 if (integer_zerop (len) || (p && *p == '\0'))
11187 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11189 /* If the requested len is greater than or equal to the string
11190 length, call strcat. */
11191 if (TREE_CODE (len) == INTEGER_CST && p
11192 && compare_tree_int (len, strlen (p)) >= 0)
11194 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11196 /* If the replacement _DECL isn't initialized, don't do the
11201 return build_call_expr_loc (loc, fn, 2, dst, src);
11207 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11210 Return NULL_TREE if no simplification was possible, otherwise return the
11211 simplified form of the call as a tree.
11213 The simplified form may be a constant or other expression which
11214 computes the same value, but in a more efficient manner (including
11215 calls to other builtin functions).
11217 The call may contain arguments which need to be evaluated, but
11218 which are not useful to determine the result of the call. In
11219 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11220 COMPOUND_EXPR will be an argument which must be evaluated.
11221 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11222 COMPOUND_EXPR in the chain will contain the tree for the simplified
11223 form of the builtin function call. */
11226 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11228 if (!validate_arg (s1, POINTER_TYPE)
11229 || !validate_arg (s2, POINTER_TYPE))
11233 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11235 /* If both arguments are constants, evaluate at compile-time. */
11238 const size_t r = strspn (p1, p2);
11239 return size_int (r);
11242 /* If either argument is "", return NULL_TREE. */
11243 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11244 /* Evaluate and ignore both arguments in case either one has
11246 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11252 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11255 Return NULL_TREE if no simplification was possible, otherwise return the
11256 simplified form of the call as a tree.
11258 The simplified form may be a constant or other expression which
11259 computes the same value, but in a more efficient manner (including
11260 calls to other builtin functions).
11262 The call may contain arguments which need to be evaluated, but
11263 which are not useful to determine the result of the call. In
11264 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11265 COMPOUND_EXPR will be an argument which must be evaluated.
11266 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11267 COMPOUND_EXPR in the chain will contain the tree for the simplified
11268 form of the builtin function call. */
11271 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11273 if (!validate_arg (s1, POINTER_TYPE)
11274 || !validate_arg (s2, POINTER_TYPE))
11278 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11280 /* If both arguments are constants, evaluate at compile-time. */
11283 const size_t r = strcspn (p1, p2);
11284 return size_int (r);
11287 /* If the first argument is "", return NULL_TREE. */
11288 if (p1 && *p1 == '\0')
11290 /* Evaluate and ignore argument s2 in case it has
11292 return omit_one_operand_loc (loc, size_type_node,
11293 size_zero_node, s2);
11296 /* If the second argument is "", return __builtin_strlen(s1). */
11297 if (p2 && *p2 == '\0')
11299 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11301 /* If the replacement _DECL isn't initialized, don't do the
11306 return build_call_expr_loc (loc, fn, 1, s1);
11312 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11313 to the call. IGNORE is true if the value returned
11314 by the builtin will be ignored. UNLOCKED is true is true if this
11315 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11316 the known length of the string. Return NULL_TREE if no simplification
11320 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11321 bool ignore, bool unlocked, tree len)
11323 /* If we're using an unlocked function, assume the other unlocked
11324 functions exist explicitly. */
11325 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11326 : implicit_built_in_decls[BUILT_IN_FPUTC];
11327 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11328 : implicit_built_in_decls[BUILT_IN_FWRITE];
11330 /* If the return value is used, don't do the transformation. */
11334 /* Verify the arguments in the original call. */
11335 if (!validate_arg (arg0, POINTER_TYPE)
11336 || !validate_arg (arg1, POINTER_TYPE))
11340 len = c_strlen (arg0, 0);
11342 /* Get the length of the string passed to fputs. If the length
11343 can't be determined, punt. */
11345 || TREE_CODE (len) != INTEGER_CST)
11348 switch (compare_tree_int (len, 1))
11350 case -1: /* length is 0, delete the call entirely . */
11351 return omit_one_operand_loc (loc, integer_type_node,
11352 integer_zero_node, arg1);;
11354 case 0: /* length is 1, call fputc. */
11356 const char *p = c_getstr (arg0);
11361 return build_call_expr_loc (loc, fn_fputc, 2,
11362 build_int_cst (NULL_TREE, p[0]), arg1);
11368 case 1: /* length is greater than 1, call fwrite. */
11370 /* If optimizing for size keep fputs. */
11371 if (optimize_function_for_size_p (cfun))
11373 /* New argument list transforming fputs(string, stream) to
11374 fwrite(string, 1, len, stream). */
11376 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11377 size_one_node, len, arg1);
11382 gcc_unreachable ();
11387 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11388 produced. False otherwise. This is done so that we don't output the error
11389 or warning twice or three times. */
11392 fold_builtin_next_arg (tree exp, bool va_start_p)
11394 tree fntype = TREE_TYPE (current_function_decl);
11395 int nargs = call_expr_nargs (exp);
11398 if (TYPE_ARG_TYPES (fntype) == 0
11399 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11400 == void_type_node))
11402 error ("%<va_start%> used in function with fixed args");
11408 if (va_start_p && (nargs != 2))
11410 error ("wrong number of arguments to function %<va_start%>");
11413 arg = CALL_EXPR_ARG (exp, 1);
11415 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11416 when we checked the arguments and if needed issued a warning. */
11421 /* Evidently an out of date version of <stdarg.h>; can't validate
11422 va_start's second argument, but can still work as intended. */
11423 warning (0, "%<__builtin_next_arg%> called without an argument");
11426 else if (nargs > 1)
11428 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11431 arg = CALL_EXPR_ARG (exp, 0);
11434 if (TREE_CODE (arg) == SSA_NAME)
11435 arg = SSA_NAME_VAR (arg);
11437 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11438 or __builtin_next_arg (0) the first time we see it, after checking
11439 the arguments and if needed issuing a warning. */
11440 if (!integer_zerop (arg))
11442 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11444 /* Strip off all nops for the sake of the comparison. This
11445 is not quite the same as STRIP_NOPS. It does more.
11446 We must also strip off INDIRECT_EXPR for C++ reference
11448 while (CONVERT_EXPR_P (arg)
11449 || TREE_CODE (arg) == INDIRECT_REF)
11450 arg = TREE_OPERAND (arg, 0);
11451 if (arg != last_parm)
11453 /* FIXME: Sometimes with the tree optimizers we can get the
11454 not the last argument even though the user used the last
11455 argument. We just warn and set the arg to be the last
11456 argument so that we will get wrong-code because of
11458 warning (0, "second parameter of %<va_start%> not last named argument");
11461 /* Undefined by C99 7.15.1.4p4 (va_start):
11462 "If the parameter parmN is declared with the register storage
11463 class, with a function or array type, or with a type that is
11464 not compatible with the type that results after application of
11465 the default argument promotions, the behavior is undefined."
11467 else if (DECL_REGISTER (arg))
11468 warning (0, "undefined behaviour when second parameter of "
11469 "%<va_start%> is declared with %<register%> storage");
11471 /* We want to verify the second parameter just once before the tree
11472 optimizers are run and then avoid keeping it in the tree,
11473 as otherwise we could warn even for correct code like:
11474 void foo (int i, ...)
11475 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11477 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11479 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11485 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11486 ORIG may be null if this is a 2-argument call. We don't attempt to
11487 simplify calls with more than 3 arguments.
11489 Return NULL_TREE if no simplification was possible, otherwise return the
11490 simplified form of the call as a tree. If IGNORED is true, it means that
11491 the caller does not use the returned value of the function. */
11494 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11495 tree orig, int ignored)
11498 const char *fmt_str = NULL;
11500 /* Verify the required arguments in the original call. We deal with two
11501 types of sprintf() calls: 'sprintf (str, fmt)' and
11502 'sprintf (dest, "%s", orig)'. */
11503 if (!validate_arg (dest, POINTER_TYPE)
11504 || !validate_arg (fmt, POINTER_TYPE))
11506 if (orig && !validate_arg (orig, POINTER_TYPE))
11509 /* Check whether the format is a literal string constant. */
11510 fmt_str = c_getstr (fmt);
11511 if (fmt_str == NULL)
11515 retval = NULL_TREE;
11517 if (!init_target_chars ())
11520 /* If the format doesn't contain % args or %%, use strcpy. */
11521 if (strchr (fmt_str, target_percent) == NULL)
11523 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11528 /* Don't optimize sprintf (buf, "abc", ptr++). */
11532 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11533 'format' is known to contain no % formats. */
11534 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11536 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11539 /* If the format is "%s", use strcpy if the result isn't used. */
11540 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11543 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11548 /* Don't crash on sprintf (str1, "%s"). */
11552 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11555 retval = c_strlen (orig, 1);
11556 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11559 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11562 if (call && retval)
11564 retval = fold_convert_loc
11565 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11567 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11573 /* Expand a call EXP to __builtin_object_size. */
11576 expand_builtin_object_size (tree exp)
11579 int object_size_type;
11580 tree fndecl = get_callee_fndecl (exp);
11582 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11584 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11586 expand_builtin_trap ();
11590 ost = CALL_EXPR_ARG (exp, 1);
11593 if (TREE_CODE (ost) != INTEGER_CST
11594 || tree_int_cst_sgn (ost) < 0
11595 || compare_tree_int (ost, 3) > 0)
11597 error ("%Klast argument of %D is not integer constant between 0 and 3",
11599 expand_builtin_trap ();
11603 object_size_type = tree_low_cst (ost, 0);
11605 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11608 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11609 FCODE is the BUILT_IN_* to use.
11610 Return NULL_RTX if we failed; the caller should emit a normal call,
11611 otherwise try to get the result in TARGET, if convenient (and in
11612 mode MODE if that's convenient). */
11615 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11616 enum built_in_function fcode)
11618 tree dest, src, len, size;
11620 if (!validate_arglist (exp,
11622 fcode == BUILT_IN_MEMSET_CHK
11623 ? INTEGER_TYPE : POINTER_TYPE,
11624 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11627 dest = CALL_EXPR_ARG (exp, 0);
11628 src = CALL_EXPR_ARG (exp, 1);
11629 len = CALL_EXPR_ARG (exp, 2);
11630 size = CALL_EXPR_ARG (exp, 3);
11632 if (! host_integerp (size, 1))
11635 if (host_integerp (len, 1) || integer_all_onesp (size))
11639 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11641 warning_at (tree_nonartificial_location (exp),
11642 0, "%Kcall to %D will always overflow destination buffer",
11643 exp, get_callee_fndecl (exp));
11648 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11649 mem{cpy,pcpy,move,set} is available. */
11652 case BUILT_IN_MEMCPY_CHK:
11653 fn = built_in_decls[BUILT_IN_MEMCPY];
11655 case BUILT_IN_MEMPCPY_CHK:
11656 fn = built_in_decls[BUILT_IN_MEMPCPY];
11658 case BUILT_IN_MEMMOVE_CHK:
11659 fn = built_in_decls[BUILT_IN_MEMMOVE];
11661 case BUILT_IN_MEMSET_CHK:
11662 fn = built_in_decls[BUILT_IN_MEMSET];
11671 fn = build_call_nofold (fn, 3, dest, src, len);
11672 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11673 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11674 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11676 else if (fcode == BUILT_IN_MEMSET_CHK)
11680 unsigned int dest_align
11681 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11683 /* If DEST is not a pointer type, call the normal function. */
11684 if (dest_align == 0)
11687 /* If SRC and DEST are the same (and not volatile), do nothing. */
11688 if (operand_equal_p (src, dest, 0))
11692 if (fcode != BUILT_IN_MEMPCPY_CHK)
11694 /* Evaluate and ignore LEN in case it has side-effects. */
11695 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11696 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11699 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11700 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11703 /* __memmove_chk special case. */
11704 if (fcode == BUILT_IN_MEMMOVE_CHK)
11706 unsigned int src_align
11707 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11709 if (src_align == 0)
11712 /* If src is categorized for a readonly section we can use
11713 normal __memcpy_chk. */
11714 if (readonly_data_expr (src))
11716 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11719 fn = build_call_nofold (fn, 4, dest, src, len, size);
11720 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11721 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11722 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11729 /* Emit warning if a buffer overflow is detected at compile time. */
11732 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11736 location_t loc = tree_nonartificial_location (exp);
11740 case BUILT_IN_STRCPY_CHK:
11741 case BUILT_IN_STPCPY_CHK:
11742 /* For __strcat_chk the warning will be emitted only if overflowing
11743 by at least strlen (dest) + 1 bytes. */
11744 case BUILT_IN_STRCAT_CHK:
11745 len = CALL_EXPR_ARG (exp, 1);
11746 size = CALL_EXPR_ARG (exp, 2);
11749 case BUILT_IN_STRNCAT_CHK:
11750 case BUILT_IN_STRNCPY_CHK:
11751 len = CALL_EXPR_ARG (exp, 2);
11752 size = CALL_EXPR_ARG (exp, 3);
11754 case BUILT_IN_SNPRINTF_CHK:
11755 case BUILT_IN_VSNPRINTF_CHK:
11756 len = CALL_EXPR_ARG (exp, 1);
11757 size = CALL_EXPR_ARG (exp, 3);
11760 gcc_unreachable ();
11766 if (! host_integerp (size, 1) || integer_all_onesp (size))
11771 len = c_strlen (len, 1);
11772 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11775 else if (fcode == BUILT_IN_STRNCAT_CHK)
11777 tree src = CALL_EXPR_ARG (exp, 1);
11778 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11780 src = c_strlen (src, 1);
11781 if (! src || ! host_integerp (src, 1))
11783 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11784 exp, get_callee_fndecl (exp));
11787 else if (tree_int_cst_lt (src, size))
11790 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11793 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11794 exp, get_callee_fndecl (exp));
11797 /* Emit warning if a buffer overflow is detected at compile time
11798 in __sprintf_chk/__vsprintf_chk calls. */
11801 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11803 tree size, len, fmt;
11804 const char *fmt_str;
11805 int nargs = call_expr_nargs (exp);
11807 /* Verify the required arguments in the original call. */
11811 size = CALL_EXPR_ARG (exp, 2);
11812 fmt = CALL_EXPR_ARG (exp, 3);
11814 if (! host_integerp (size, 1) || integer_all_onesp (size))
11817 /* Check whether the format is a literal string constant. */
11818 fmt_str = c_getstr (fmt);
11819 if (fmt_str == NULL)
11822 if (!init_target_chars ())
11825 /* If the format doesn't contain % args or %%, we know its size. */
11826 if (strchr (fmt_str, target_percent) == 0)
11827 len = build_int_cstu (size_type_node, strlen (fmt_str));
11828 /* If the format is "%s" and first ... argument is a string literal,
11830 else if (fcode == BUILT_IN_SPRINTF_CHK
11831 && strcmp (fmt_str, target_percent_s) == 0)
11837 arg = CALL_EXPR_ARG (exp, 4);
11838 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11841 len = c_strlen (arg, 1);
11842 if (!len || ! host_integerp (len, 1))
11848 if (! tree_int_cst_lt (len, size))
11849 warning_at (tree_nonartificial_location (exp),
11850 0, "%Kcall to %D will always overflow destination buffer",
11851 exp, get_callee_fndecl (exp));
11854 /* Emit warning if a free is called with address of a variable. */
11857 maybe_emit_free_warning (tree exp)
11859 tree arg = CALL_EXPR_ARG (exp, 0);
11862 if (TREE_CODE (arg) != ADDR_EXPR)
11865 arg = get_base_address (TREE_OPERAND (arg, 0));
11866 if (arg == NULL || INDIRECT_REF_P (arg))
11869 if (SSA_VAR_P (arg))
11870 warning_at (tree_nonartificial_location (exp),
11871 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11873 warning_at (tree_nonartificial_location (exp),
11874 0, "%Kattempt to free a non-heap object", exp);
11877 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11881 fold_builtin_object_size (tree ptr, tree ost)
11883 tree ret = NULL_TREE;
11884 int object_size_type;
11886 if (!validate_arg (ptr, POINTER_TYPE)
11887 || !validate_arg (ost, INTEGER_TYPE))
11892 if (TREE_CODE (ost) != INTEGER_CST
11893 || tree_int_cst_sgn (ost) < 0
11894 || compare_tree_int (ost, 3) > 0)
11897 object_size_type = tree_low_cst (ost, 0);
11899 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11900 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11901 and (size_t) 0 for types 2 and 3. */
11902 if (TREE_SIDE_EFFECTS (ptr))
11903 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11905 if (TREE_CODE (ptr) == ADDR_EXPR)
11906 ret = build_int_cstu (size_type_node,
11907 compute_builtin_object_size (ptr, object_size_type));
11909 else if (TREE_CODE (ptr) == SSA_NAME)
11911 unsigned HOST_WIDE_INT bytes;
11913 /* If object size is not known yet, delay folding until
11914 later. Maybe subsequent passes will help determining
11916 bytes = compute_builtin_object_size (ptr, object_size_type);
11917 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11919 ret = build_int_cstu (size_type_node, bytes);
11924 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11925 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11926 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11933 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11934 DEST, SRC, LEN, and SIZE are the arguments to the call.
11935 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11936 code of the builtin. If MAXLEN is not NULL, it is maximum length
11937 passed as third argument. */
11940 fold_builtin_memory_chk (location_t loc, tree fndecl,
11941 tree dest, tree src, tree len, tree size,
11942 tree maxlen, bool ignore,
11943 enum built_in_function fcode)
11947 if (!validate_arg (dest, POINTER_TYPE)
11948 || !validate_arg (src,
11949 (fcode == BUILT_IN_MEMSET_CHK
11950 ? INTEGER_TYPE : POINTER_TYPE))
11951 || !validate_arg (len, INTEGER_TYPE)
11952 || !validate_arg (size, INTEGER_TYPE))
11955 /* If SRC and DEST are the same (and not volatile), return DEST
11956 (resp. DEST+LEN for __mempcpy_chk). */
11957 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11959 if (fcode != BUILT_IN_MEMPCPY_CHK)
11960 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
11964 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
11966 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
11970 if (! host_integerp (size, 1))
11973 if (! integer_all_onesp (size))
11975 if (! host_integerp (len, 1))
11977 /* If LEN is not constant, try MAXLEN too.
11978 For MAXLEN only allow optimizing into non-_ocs function
11979 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11980 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11982 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11984 /* (void) __mempcpy_chk () can be optimized into
11985 (void) __memcpy_chk (). */
11986 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11990 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
11998 if (tree_int_cst_lt (size, maxlen))
12003 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12004 mem{cpy,pcpy,move,set} is available. */
12007 case BUILT_IN_MEMCPY_CHK:
12008 fn = built_in_decls[BUILT_IN_MEMCPY];
12010 case BUILT_IN_MEMPCPY_CHK:
12011 fn = built_in_decls[BUILT_IN_MEMPCPY];
12013 case BUILT_IN_MEMMOVE_CHK:
12014 fn = built_in_decls[BUILT_IN_MEMMOVE];
12016 case BUILT_IN_MEMSET_CHK:
12017 fn = built_in_decls[BUILT_IN_MEMSET];
12026 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12029 /* Fold a call to the __st[rp]cpy_chk builtin.
12030 DEST, SRC, and SIZE are the arguments to the call.
12031 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12032 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12033 strings passed as second argument. */
12036 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12037 tree src, tree size,
12038 tree maxlen, bool ignore,
12039 enum built_in_function fcode)
12043 if (!validate_arg (dest, POINTER_TYPE)
12044 || !validate_arg (src, POINTER_TYPE)
12045 || !validate_arg (size, INTEGER_TYPE))
12048 /* If SRC and DEST are the same (and not volatile), return DEST. */
12049 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12050 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12052 if (! host_integerp (size, 1))
12055 if (! integer_all_onesp (size))
12057 len = c_strlen (src, 1);
12058 if (! len || ! host_integerp (len, 1))
12060 /* If LEN is not constant, try MAXLEN too.
12061 For MAXLEN only allow optimizing into non-_ocs function
12062 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12063 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12065 if (fcode == BUILT_IN_STPCPY_CHK)
12070 /* If return value of __stpcpy_chk is ignored,
12071 optimize into __strcpy_chk. */
12072 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12076 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12079 if (! len || TREE_SIDE_EFFECTS (len))
12082 /* If c_strlen returned something, but not a constant,
12083 transform __strcpy_chk into __memcpy_chk. */
12084 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12088 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12089 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12090 build_call_expr_loc (loc, fn, 4,
12091 dest, src, len, size));
12097 if (! tree_int_cst_lt (maxlen, size))
12101 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12102 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12103 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12107 return build_call_expr_loc (loc, fn, 2, dest, src);
12110 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12111 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12112 length passed as third argument. */
12115 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12116 tree len, tree size, tree maxlen)
12120 if (!validate_arg (dest, POINTER_TYPE)
12121 || !validate_arg (src, POINTER_TYPE)
12122 || !validate_arg (len, INTEGER_TYPE)
12123 || !validate_arg (size, INTEGER_TYPE))
12126 if (! host_integerp (size, 1))
12129 if (! integer_all_onesp (size))
12131 if (! host_integerp (len, 1))
12133 /* If LEN is not constant, try MAXLEN too.
12134 For MAXLEN only allow optimizing into non-_ocs function
12135 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12136 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12142 if (tree_int_cst_lt (size, maxlen))
12146 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12147 fn = built_in_decls[BUILT_IN_STRNCPY];
12151 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12154 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12155 are the arguments to the call. */
12158 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12159 tree src, tree size)
12164 if (!validate_arg (dest, POINTER_TYPE)
12165 || !validate_arg (src, POINTER_TYPE)
12166 || !validate_arg (size, INTEGER_TYPE))
12169 p = c_getstr (src);
12170 /* If the SRC parameter is "", return DEST. */
12171 if (p && *p == '\0')
12172 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12174 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12177 /* If __builtin_strcat_chk is used, assume strcat is available. */
12178 fn = built_in_decls[BUILT_IN_STRCAT];
12182 return build_call_expr_loc (loc, fn, 2, dest, src);
12185 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12189 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12190 tree dest, tree src, tree len, tree size)
12195 if (!validate_arg (dest, POINTER_TYPE)
12196 || !validate_arg (src, POINTER_TYPE)
12197 || !validate_arg (size, INTEGER_TYPE)
12198 || !validate_arg (size, INTEGER_TYPE))
12201 p = c_getstr (src);
12202 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12203 if (p && *p == '\0')
12204 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12205 else if (integer_zerop (len))
12206 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12208 if (! host_integerp (size, 1))
12211 if (! integer_all_onesp (size))
12213 tree src_len = c_strlen (src, 1);
12215 && host_integerp (src_len, 1)
12216 && host_integerp (len, 1)
12217 && ! tree_int_cst_lt (len, src_len))
12219 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12220 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12224 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12229 /* If __builtin_strncat_chk is used, assume strncat is available. */
12230 fn = built_in_decls[BUILT_IN_STRNCAT];
12234 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12237 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12238 a normal call should be emitted rather than expanding the function
12239 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12242 fold_builtin_sprintf_chk (location_t loc, tree exp,
12243 enum built_in_function fcode)
12245 tree dest, size, len, fn, fmt, flag;
12246 const char *fmt_str;
12247 int nargs = call_expr_nargs (exp);
12249 /* Verify the required arguments in the original call. */
12252 dest = CALL_EXPR_ARG (exp, 0);
12253 if (!validate_arg (dest, POINTER_TYPE))
12255 flag = CALL_EXPR_ARG (exp, 1);
12256 if (!validate_arg (flag, INTEGER_TYPE))
12258 size = CALL_EXPR_ARG (exp, 2);
12259 if (!validate_arg (size, INTEGER_TYPE))
12261 fmt = CALL_EXPR_ARG (exp, 3);
12262 if (!validate_arg (fmt, POINTER_TYPE))
12265 if (! host_integerp (size, 1))
12270 if (!init_target_chars ())
12273 /* Check whether the format is a literal string constant. */
12274 fmt_str = c_getstr (fmt);
12275 if (fmt_str != NULL)
12277 /* If the format doesn't contain % args or %%, we know the size. */
12278 if (strchr (fmt_str, target_percent) == 0)
12280 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12281 len = build_int_cstu (size_type_node, strlen (fmt_str));
12283 /* If the format is "%s" and first ... argument is a string literal,
12284 we know the size too. */
12285 else if (fcode == BUILT_IN_SPRINTF_CHK
12286 && strcmp (fmt_str, target_percent_s) == 0)
12292 arg = CALL_EXPR_ARG (exp, 4);
12293 if (validate_arg (arg, POINTER_TYPE))
12295 len = c_strlen (arg, 1);
12296 if (! len || ! host_integerp (len, 1))
12303 if (! integer_all_onesp (size))
12305 if (! len || ! tree_int_cst_lt (len, size))
12309 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12310 or if format doesn't contain % chars or is "%s". */
12311 if (! integer_zerop (flag))
12313 if (fmt_str == NULL)
12315 if (strchr (fmt_str, target_percent) != NULL
12316 && strcmp (fmt_str, target_percent_s))
12320 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12321 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12322 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12326 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12329 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12330 a normal call should be emitted rather than expanding the function
12331 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12332 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12333 passed as second argument. */
12336 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12337 enum built_in_function fcode)
12339 tree dest, size, len, fn, fmt, flag;
12340 const char *fmt_str;
12342 /* Verify the required arguments in the original call. */
12343 if (call_expr_nargs (exp) < 5)
12345 dest = CALL_EXPR_ARG (exp, 0);
12346 if (!validate_arg (dest, POINTER_TYPE))
12348 len = CALL_EXPR_ARG (exp, 1);
12349 if (!validate_arg (len, INTEGER_TYPE))
12351 flag = CALL_EXPR_ARG (exp, 2);
12352 if (!validate_arg (flag, INTEGER_TYPE))
12354 size = CALL_EXPR_ARG (exp, 3);
12355 if (!validate_arg (size, INTEGER_TYPE))
12357 fmt = CALL_EXPR_ARG (exp, 4);
12358 if (!validate_arg (fmt, POINTER_TYPE))
12361 if (! host_integerp (size, 1))
12364 if (! integer_all_onesp (size))
12366 if (! host_integerp (len, 1))
12368 /* If LEN is not constant, try MAXLEN too.
12369 For MAXLEN only allow optimizing into non-_ocs function
12370 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12371 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12377 if (tree_int_cst_lt (size, maxlen))
12381 if (!init_target_chars ())
12384 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12385 or if format doesn't contain % chars or is "%s". */
12386 if (! integer_zerop (flag))
12388 fmt_str = c_getstr (fmt);
12389 if (fmt_str == NULL)
12391 if (strchr (fmt_str, target_percent) != NULL
12392 && strcmp (fmt_str, target_percent_s))
12396 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12398 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12399 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12403 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12406 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12407 FMT and ARG are the arguments to the call; we don't fold cases with
12408 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12410 Return NULL_TREE if no simplification was possible, otherwise return the
12411 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12412 code of the function to be simplified. */
12415 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12416 tree arg, bool ignore,
12417 enum built_in_function fcode)
12419 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12420 const char *fmt_str = NULL;
12422 /* If the return value is used, don't do the transformation. */
12426 /* Verify the required arguments in the original call. */
12427 if (!validate_arg (fmt, POINTER_TYPE))
12430 /* Check whether the format is a literal string constant. */
12431 fmt_str = c_getstr (fmt);
12432 if (fmt_str == NULL)
12435 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12437 /* If we're using an unlocked function, assume the other
12438 unlocked functions exist explicitly. */
12439 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12440 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12444 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12445 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12448 if (!init_target_chars ())
12451 if (strcmp (fmt_str, target_percent_s) == 0
12452 || strchr (fmt_str, target_percent) == NULL)
12456 if (strcmp (fmt_str, target_percent_s) == 0)
12458 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12461 if (!arg || !validate_arg (arg, POINTER_TYPE))
12464 str = c_getstr (arg);
12470 /* The format specifier doesn't contain any '%' characters. */
12471 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12477 /* If the string was "", printf does nothing. */
12478 if (str[0] == '\0')
12479 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12481 /* If the string has length of 1, call putchar. */
12482 if (str[1] == '\0')
12484 /* Given printf("c"), (where c is any one character,)
12485 convert "c"[0] to an int and pass that to the replacement
12487 newarg = build_int_cst (NULL_TREE, str[0]);
12489 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12493 /* If the string was "string\n", call puts("string"). */
12494 size_t len = strlen (str);
12495 if ((unsigned char)str[len - 1] == target_newline)
12497 /* Create a NUL-terminated string that's one char shorter
12498 than the original, stripping off the trailing '\n'. */
12499 char *newstr = XALLOCAVEC (char, len);
12500 memcpy (newstr, str, len - 1);
12501 newstr[len - 1] = 0;
12503 newarg = build_string_literal (len, newstr);
12505 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12508 /* We'd like to arrange to call fputs(string,stdout) here,
12509 but we need stdout and don't have a way to get it yet. */
12514 /* The other optimizations can be done only on the non-va_list variants. */
12515 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12518 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12519 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12521 if (!arg || !validate_arg (arg, POINTER_TYPE))
12524 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12527 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12528 else if (strcmp (fmt_str, target_percent_c) == 0)
12530 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12533 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12539 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12542 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12543 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12544 more than 3 arguments, and ARG may be null in the 2-argument case.
12546 Return NULL_TREE if no simplification was possible, otherwise return the
12547 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12548 code of the function to be simplified. */
12551 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12552 tree fmt, tree arg, bool ignore,
12553 enum built_in_function fcode)
12555 tree fn_fputc, fn_fputs, call = NULL_TREE;
12556 const char *fmt_str = NULL;
12558 /* If the return value is used, don't do the transformation. */
12562 /* Verify the required arguments in the original call. */
12563 if (!validate_arg (fp, POINTER_TYPE))
12565 if (!validate_arg (fmt, POINTER_TYPE))
12568 /* Check whether the format is a literal string constant. */
12569 fmt_str = c_getstr (fmt);
12570 if (fmt_str == NULL)
12573 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12575 /* If we're using an unlocked function, assume the other
12576 unlocked functions exist explicitly. */
12577 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12578 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12582 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12583 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12586 if (!init_target_chars ())
12589 /* If the format doesn't contain % args or %%, use strcpy. */
12590 if (strchr (fmt_str, target_percent) == NULL)
12592 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12596 /* If the format specifier was "", fprintf does nothing. */
12597 if (fmt_str[0] == '\0')
12599 /* If FP has side-effects, just wait until gimplification is
12601 if (TREE_SIDE_EFFECTS (fp))
12604 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12607 /* When "string" doesn't contain %, replace all cases of
12608 fprintf (fp, string) with fputs (string, fp). The fputs
12609 builtin will take care of special cases like length == 1. */
12611 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12614 /* The other optimizations can be done only on the non-va_list variants. */
12615 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12618 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12619 else if (strcmp (fmt_str, target_percent_s) == 0)
12621 if (!arg || !validate_arg (arg, POINTER_TYPE))
12624 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12627 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12628 else if (strcmp (fmt_str, target_percent_c) == 0)
12630 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12633 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12638 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12641 /* Initialize format string characters in the target charset. */
12644 init_target_chars (void)
12649 target_newline = lang_hooks.to_target_charset ('\n');
12650 target_percent = lang_hooks.to_target_charset ('%');
12651 target_c = lang_hooks.to_target_charset ('c');
12652 target_s = lang_hooks.to_target_charset ('s');
12653 if (target_newline == 0 || target_percent == 0 || target_c == 0
12657 target_percent_c[0] = target_percent;
12658 target_percent_c[1] = target_c;
12659 target_percent_c[2] = '\0';
12661 target_percent_s[0] = target_percent;
12662 target_percent_s[1] = target_s;
12663 target_percent_s[2] = '\0';
12665 target_percent_s_newline[0] = target_percent;
12666 target_percent_s_newline[1] = target_s;
12667 target_percent_s_newline[2] = target_newline;
12668 target_percent_s_newline[3] = '\0';
12675 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12676 and no overflow/underflow occurred. INEXACT is true if M was not
12677 exactly calculated. TYPE is the tree type for the result. This
12678 function assumes that you cleared the MPFR flags and then
12679 calculated M to see if anything subsequently set a flag prior to
12680 entering this function. Return NULL_TREE if any checks fail. */
12683 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12685 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12686 overflow/underflow occurred. If -frounding-math, proceed iff the
12687 result of calling FUNC was exact. */
12688 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12689 && (!flag_rounding_math || !inexact))
12691 REAL_VALUE_TYPE rr;
12693 real_from_mpfr (&rr, m, type, GMP_RNDN);
12694 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12695 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12696 but the mpft_t is not, then we underflowed in the
12698 if (real_isfinite (&rr)
12699 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12701 REAL_VALUE_TYPE rmode;
12703 real_convert (&rmode, TYPE_MODE (type), &rr);
12704 /* Proceed iff the specified mode can hold the value. */
12705 if (real_identical (&rmode, &rr))
12706 return build_real (type, rmode);
12712 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12713 number and no overflow/underflow occurred. INEXACT is true if M
12714 was not exactly calculated. TYPE is the tree type for the result.
12715 This function assumes that you cleared the MPFR flags and then
12716 calculated M to see if anything subsequently set a flag prior to
12717 entering this function. Return NULL_TREE if any checks fail, if
12718 FORCE_CONVERT is true, then bypass the checks. */
12721 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12723 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12724 overflow/underflow occurred. If -frounding-math, proceed iff the
12725 result of calling FUNC was exact. */
12727 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12728 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12729 && (!flag_rounding_math || !inexact)))
12731 REAL_VALUE_TYPE re, im;
12733 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12734 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12735 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12736 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12737 but the mpft_t is not, then we underflowed in the
12740 || (real_isfinite (&re) && real_isfinite (&im)
12741 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12742 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12744 REAL_VALUE_TYPE re_mode, im_mode;
12746 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12747 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12748 /* Proceed iff the specified mode can hold the value. */
12750 || (real_identical (&re_mode, &re)
12751 && real_identical (&im_mode, &im)))
12752 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12753 build_real (TREE_TYPE (type), im_mode));
12759 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12760 FUNC on it and return the resulting value as a tree with type TYPE.
12761 If MIN and/or MAX are not NULL, then the supplied ARG must be
12762 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12763 acceptable values, otherwise they are not. The mpfr precision is
12764 set to the precision of TYPE. We assume that function FUNC returns
12765 zero if the result could be calculated exactly within the requested
12769 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12770 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12773 tree result = NULL_TREE;
12777 /* To proceed, MPFR must exactly represent the target floating point
12778 format, which only happens when the target base equals two. */
12779 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12780 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12782 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12784 if (real_isfinite (ra)
12785 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12786 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12788 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12789 const int prec = fmt->p;
12790 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12794 mpfr_init2 (m, prec);
12795 mpfr_from_real (m, ra, GMP_RNDN);
12796 mpfr_clear_flags ();
12797 inexact = func (m, m, rnd);
12798 result = do_mpfr_ckconv (m, type, inexact);
12806 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12807 FUNC on it and return the resulting value as a tree with type TYPE.
12808 The mpfr precision is set to the precision of TYPE. We assume that
12809 function FUNC returns zero if the result could be calculated
12810 exactly within the requested precision. */
12813 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12814 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12816 tree result = NULL_TREE;
12821 /* To proceed, MPFR must exactly represent the target floating point
12822 format, which only happens when the target base equals two. */
12823 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12824 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12825 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12827 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12828 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12830 if (real_isfinite (ra1) && real_isfinite (ra2))
12832 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12833 const int prec = fmt->p;
12834 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12838 mpfr_inits2 (prec, m1, m2, NULL);
12839 mpfr_from_real (m1, ra1, GMP_RNDN);
12840 mpfr_from_real (m2, ra2, GMP_RNDN);
12841 mpfr_clear_flags ();
12842 inexact = func (m1, m1, m2, rnd);
12843 result = do_mpfr_ckconv (m1, type, inexact);
12844 mpfr_clears (m1, m2, NULL);
12851 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12852 FUNC on it and return the resulting value as a tree with type TYPE.
12853 The mpfr precision is set to the precision of TYPE. We assume that
12854 function FUNC returns zero if the result could be calculated
12855 exactly within the requested precision. */
12858 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12859 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12861 tree result = NULL_TREE;
12867 /* To proceed, MPFR must exactly represent the target floating point
12868 format, which only happens when the target base equals two. */
12869 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12870 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12871 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12872 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12874 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12875 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12876 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12878 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12880 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12881 const int prec = fmt->p;
12882 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12886 mpfr_inits2 (prec, m1, m2, m3, NULL);
12887 mpfr_from_real (m1, ra1, GMP_RNDN);
12888 mpfr_from_real (m2, ra2, GMP_RNDN);
12889 mpfr_from_real (m3, ra3, GMP_RNDN);
12890 mpfr_clear_flags ();
12891 inexact = func (m1, m1, m2, m3, rnd);
12892 result = do_mpfr_ckconv (m1, type, inexact);
12893 mpfr_clears (m1, m2, m3, NULL);
12900 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12901 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12902 If ARG_SINP and ARG_COSP are NULL then the result is returned
12903 as a complex value.
12904 The type is taken from the type of ARG and is used for setting the
12905 precision of the calculation and results. */
12908 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12910 tree const type = TREE_TYPE (arg);
12911 tree result = NULL_TREE;
12915 /* To proceed, MPFR must exactly represent the target floating point
12916 format, which only happens when the target base equals two. */
12917 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12918 && TREE_CODE (arg) == REAL_CST
12919 && !TREE_OVERFLOW (arg))
12921 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12923 if (real_isfinite (ra))
12925 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12926 const int prec = fmt->p;
12927 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12928 tree result_s, result_c;
12932 mpfr_inits2 (prec, m, ms, mc, NULL);
12933 mpfr_from_real (m, ra, GMP_RNDN);
12934 mpfr_clear_flags ();
12935 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12936 result_s = do_mpfr_ckconv (ms, type, inexact);
12937 result_c = do_mpfr_ckconv (mc, type, inexact);
12938 mpfr_clears (m, ms, mc, NULL);
12939 if (result_s && result_c)
12941 /* If we are to return in a complex value do so. */
12942 if (!arg_sinp && !arg_cosp)
12943 return build_complex (build_complex_type (type),
12944 result_c, result_s);
12946 /* Dereference the sin/cos pointer arguments. */
12947 arg_sinp = build_fold_indirect_ref (arg_sinp);
12948 arg_cosp = build_fold_indirect_ref (arg_cosp);
12949 /* Proceed if valid pointer type were passed in. */
12950 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12951 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12953 /* Set the values. */
12954 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12956 TREE_SIDE_EFFECTS (result_s) = 1;
12957 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12959 TREE_SIDE_EFFECTS (result_c) = 1;
12960 /* Combine the assignments into a compound expr. */
12961 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12962 result_s, result_c));
12970 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12971 two-argument mpfr order N Bessel function FUNC on them and return
12972 the resulting value as a tree with type TYPE. The mpfr precision
12973 is set to the precision of TYPE. We assume that function FUNC
12974 returns zero if the result could be calculated exactly within the
12975 requested precision. */
12977 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12978 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12979 const REAL_VALUE_TYPE *min, bool inclusive)
12981 tree result = NULL_TREE;
12986 /* To proceed, MPFR must exactly represent the target floating point
12987 format, which only happens when the target base equals two. */
12988 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12989 && host_integerp (arg1, 0)
12990 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12992 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12993 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12996 && real_isfinite (ra)
12997 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12999 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13000 const int prec = fmt->p;
13001 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13005 mpfr_init2 (m, prec);
13006 mpfr_from_real (m, ra, GMP_RNDN);
13007 mpfr_clear_flags ();
13008 inexact = func (m, n, m, rnd);
13009 result = do_mpfr_ckconv (m, type, inexact);
13017 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13018 the pointer *(ARG_QUO) and return the result. The type is taken
13019 from the type of ARG0 and is used for setting the precision of the
13020 calculation and results. */
13023 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13025 tree const type = TREE_TYPE (arg0);
13026 tree result = NULL_TREE;
13031 /* To proceed, MPFR must exactly represent the target floating point
13032 format, which only happens when the target base equals two. */
13033 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13034 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13035 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13037 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13038 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13040 if (real_isfinite (ra0) && real_isfinite (ra1))
13042 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13043 const int prec = fmt->p;
13044 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13049 mpfr_inits2 (prec, m0, m1, NULL);
13050 mpfr_from_real (m0, ra0, GMP_RNDN);
13051 mpfr_from_real (m1, ra1, GMP_RNDN);
13052 mpfr_clear_flags ();
13053 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13054 /* Remquo is independent of the rounding mode, so pass
13055 inexact=0 to do_mpfr_ckconv(). */
13056 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13057 mpfr_clears (m0, m1, NULL);
13060 /* MPFR calculates quo in the host's long so it may
13061 return more bits in quo than the target int can hold
13062 if sizeof(host long) > sizeof(target int). This can
13063 happen even for native compilers in LP64 mode. In
13064 these cases, modulo the quo value with the largest
13065 number that the target int can hold while leaving one
13066 bit for the sign. */
13067 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13068 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13070 /* Dereference the quo pointer argument. */
13071 arg_quo = build_fold_indirect_ref (arg_quo);
13072 /* Proceed iff a valid pointer type was passed in. */
13073 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13075 /* Set the value. */
13076 tree result_quo = fold_build2 (MODIFY_EXPR,
13077 TREE_TYPE (arg_quo), arg_quo,
13078 build_int_cst (NULL, integer_quo));
13079 TREE_SIDE_EFFECTS (result_quo) = 1;
13080 /* Combine the quo assignment with the rem. */
13081 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13082 result_quo, result_rem));
13090 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13091 resulting value as a tree with type TYPE. The mpfr precision is
13092 set to the precision of TYPE. We assume that this mpfr function
13093 returns zero if the result could be calculated exactly within the
13094 requested precision. In addition, the integer pointer represented
13095 by ARG_SG will be dereferenced and set to the appropriate signgam
13099 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13101 tree result = NULL_TREE;
13105 /* To proceed, MPFR must exactly represent the target floating point
13106 format, which only happens when the target base equals two. Also
13107 verify ARG is a constant and that ARG_SG is an int pointer. */
13108 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13109 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13110 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13111 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13113 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13115 /* In addition to NaN and Inf, the argument cannot be zero or a
13116 negative integer. */
13117 if (real_isfinite (ra)
13118 && ra->cl != rvc_zero
13119 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13121 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13122 const int prec = fmt->p;
13123 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13128 mpfr_init2 (m, prec);
13129 mpfr_from_real (m, ra, GMP_RNDN);
13130 mpfr_clear_flags ();
13131 inexact = mpfr_lgamma (m, &sg, m, rnd);
13132 result_lg = do_mpfr_ckconv (m, type, inexact);
13138 /* Dereference the arg_sg pointer argument. */
13139 arg_sg = build_fold_indirect_ref (arg_sg);
13140 /* Assign the signgam value into *arg_sg. */
13141 result_sg = fold_build2 (MODIFY_EXPR,
13142 TREE_TYPE (arg_sg), arg_sg,
13143 build_int_cst (NULL, sg));
13144 TREE_SIDE_EFFECTS (result_sg) = 1;
13145 /* Combine the signgam assignment with the lgamma result. */
13146 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13147 result_sg, result_lg));
13155 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13156 function FUNC on it and return the resulting value as a tree with
13157 type TYPE. The mpfr precision is set to the precision of TYPE. We
13158 assume that function FUNC returns zero if the result could be
13159 calculated exactly within the requested precision. */
13162 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13164 tree result = NULL_TREE;
13168 /* To proceed, MPFR must exactly represent the target floating point
13169 format, which only happens when the target base equals two. */
13170 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13171 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13172 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13174 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13175 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13177 if (real_isfinite (re) && real_isfinite (im))
13179 const struct real_format *const fmt =
13180 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13181 const int prec = fmt->p;
13182 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13183 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13187 mpc_init2 (m, prec);
13188 mpfr_from_real (mpc_realref(m), re, rnd);
13189 mpfr_from_real (mpc_imagref(m), im, rnd);
13190 mpfr_clear_flags ();
13191 inexact = func (m, m, crnd);
13192 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13200 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13201 mpc function FUNC on it and return the resulting value as a tree
13202 with type TYPE. The mpfr precision is set to the precision of
13203 TYPE. We assume that function FUNC returns zero if the result
13204 could be calculated exactly within the requested precision. If
13205 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13206 in the arguments and/or results. */
13209 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13210 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13212 tree result = NULL_TREE;
13217 /* To proceed, MPFR must exactly represent the target floating point
13218 format, which only happens when the target base equals two. */
13219 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13220 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13221 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13222 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13223 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13225 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13226 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13227 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13228 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13231 || (real_isfinite (re0) && real_isfinite (im0)
13232 && real_isfinite (re1) && real_isfinite (im1)))
13234 const struct real_format *const fmt =
13235 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13236 const int prec = fmt->p;
13237 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13238 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13242 mpc_init2 (m0, prec);
13243 mpc_init2 (m1, prec);
13244 mpfr_from_real (mpc_realref(m0), re0, rnd);
13245 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13246 mpfr_from_real (mpc_realref(m1), re1, rnd);
13247 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13248 mpfr_clear_flags ();
13249 inexact = func (m0, m0, m1, crnd);
13250 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13260 The functions below provide an alternate interface for folding
13261 builtin function calls presented as GIMPLE_CALL statements rather
13262 than as CALL_EXPRs. The folded result is still expressed as a
13263 tree. There is too much code duplication in the handling of
13264 varargs functions, and a more intrusive re-factoring would permit
13265 better sharing of code between the tree and statement-based
13266 versions of these functions. */
13268 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13269 along with N new arguments specified as the "..." parameters. SKIP
13270 is the number of arguments in STMT to be omitted. This function is used
13271 to do varargs-to-varargs transformations. */
13274 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13276 int oldnargs = gimple_call_num_args (stmt);
13277 int nargs = oldnargs - skip + n;
13278 tree fntype = TREE_TYPE (fndecl);
13279 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13283 location_t loc = gimple_location (stmt);
13285 buffer = XALLOCAVEC (tree, nargs);
13287 for (i = 0; i < n; i++)
13288 buffer[i] = va_arg (ap, tree);
13290 for (j = skip; j < oldnargs; j++, i++)
13291 buffer[i] = gimple_call_arg (stmt, j);
13293 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13296 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13297 a normal call should be emitted rather than expanding the function
13298 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13301 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13303 tree dest, size, len, fn, fmt, flag;
13304 const char *fmt_str;
13305 int nargs = gimple_call_num_args (stmt);
13307 /* Verify the required arguments in the original call. */
13310 dest = gimple_call_arg (stmt, 0);
13311 if (!validate_arg (dest, POINTER_TYPE))
13313 flag = gimple_call_arg (stmt, 1);
13314 if (!validate_arg (flag, INTEGER_TYPE))
13316 size = gimple_call_arg (stmt, 2);
13317 if (!validate_arg (size, INTEGER_TYPE))
13319 fmt = gimple_call_arg (stmt, 3);
13320 if (!validate_arg (fmt, POINTER_TYPE))
13323 if (! host_integerp (size, 1))
13328 if (!init_target_chars ())
13331 /* Check whether the format is a literal string constant. */
13332 fmt_str = c_getstr (fmt);
13333 if (fmt_str != NULL)
13335 /* If the format doesn't contain % args or %%, we know the size. */
13336 if (strchr (fmt_str, target_percent) == 0)
13338 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13339 len = build_int_cstu (size_type_node, strlen (fmt_str));
13341 /* If the format is "%s" and first ... argument is a string literal,
13342 we know the size too. */
13343 else if (fcode == BUILT_IN_SPRINTF_CHK
13344 && strcmp (fmt_str, target_percent_s) == 0)
13350 arg = gimple_call_arg (stmt, 4);
13351 if (validate_arg (arg, POINTER_TYPE))
13353 len = c_strlen (arg, 1);
13354 if (! len || ! host_integerp (len, 1))
13361 if (! integer_all_onesp (size))
13363 if (! len || ! tree_int_cst_lt (len, size))
13367 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13368 or if format doesn't contain % chars or is "%s". */
13369 if (! integer_zerop (flag))
13371 if (fmt_str == NULL)
13373 if (strchr (fmt_str, target_percent) != NULL
13374 && strcmp (fmt_str, target_percent_s))
13378 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13379 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13380 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13384 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13387 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13388 a normal call should be emitted rather than expanding the function
13389 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13390 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13391 passed as second argument. */
13394 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13395 enum built_in_function fcode)
13397 tree dest, size, len, fn, fmt, flag;
13398 const char *fmt_str;
13400 /* Verify the required arguments in the original call. */
13401 if (gimple_call_num_args (stmt) < 5)
13403 dest = gimple_call_arg (stmt, 0);
13404 if (!validate_arg (dest, POINTER_TYPE))
13406 len = gimple_call_arg (stmt, 1);
13407 if (!validate_arg (len, INTEGER_TYPE))
13409 flag = gimple_call_arg (stmt, 2);
13410 if (!validate_arg (flag, INTEGER_TYPE))
13412 size = gimple_call_arg (stmt, 3);
13413 if (!validate_arg (size, INTEGER_TYPE))
13415 fmt = gimple_call_arg (stmt, 4);
13416 if (!validate_arg (fmt, POINTER_TYPE))
13419 if (! host_integerp (size, 1))
13422 if (! integer_all_onesp (size))
13424 if (! host_integerp (len, 1))
13426 /* If LEN is not constant, try MAXLEN too.
13427 For MAXLEN only allow optimizing into non-_ocs function
13428 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13429 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13435 if (tree_int_cst_lt (size, maxlen))
13439 if (!init_target_chars ())
13442 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13443 or if format doesn't contain % chars or is "%s". */
13444 if (! integer_zerop (flag))
13446 fmt_str = c_getstr (fmt);
13447 if (fmt_str == NULL)
13449 if (strchr (fmt_str, target_percent) != NULL
13450 && strcmp (fmt_str, target_percent_s))
13454 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13456 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13457 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13461 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13464 /* Builtins with folding operations that operate on "..." arguments
13465 need special handling; we need to store the arguments in a convenient
13466 data structure before attempting any folding. Fortunately there are
13467 only a few builtins that fall into this category. FNDECL is the
13468 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13469 result of the function call is ignored. */
13472 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13473 bool ignore ATTRIBUTE_UNUSED)
13475 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13476 tree ret = NULL_TREE;
13480 case BUILT_IN_SPRINTF_CHK:
13481 case BUILT_IN_VSPRINTF_CHK:
13482 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13485 case BUILT_IN_SNPRINTF_CHK:
13486 case BUILT_IN_VSNPRINTF_CHK:
13487 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13494 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13495 TREE_NO_WARNING (ret) = 1;
13501 /* A wrapper function for builtin folding that prevents warnings for
13502 "statement without effect" and the like, caused by removing the
13503 call node earlier than the warning is generated. */
13506 fold_call_stmt (gimple stmt, bool ignore)
13508 tree ret = NULL_TREE;
13509 tree fndecl = gimple_call_fndecl (stmt);
13510 location_t loc = gimple_location (stmt);
13512 && TREE_CODE (fndecl) == FUNCTION_DECL
13513 && DECL_BUILT_IN (fndecl)
13514 && !gimple_call_va_arg_pack_p (stmt))
13516 int nargs = gimple_call_num_args (stmt);
13518 if (avoid_folding_inline_builtin (fndecl))
13520 /* FIXME: Don't use a list in this interface. */
13521 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13523 tree arglist = NULL_TREE;
13525 for (i = nargs - 1; i >= 0; i--)
13526 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13527 return targetm.fold_builtin (fndecl, arglist, ignore);
13531 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13533 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13535 for (i = 0; i < nargs; i++)
13536 args[i] = gimple_call_arg (stmt, i);
13537 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13540 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13543 /* Propagate location information from original call to
13544 expansion of builtin. Otherwise things like
13545 maybe_emit_chk_warning, that operate on the expansion
13546 of a builtin, will use the wrong location information. */
13547 if (gimple_has_location (stmt))
13549 tree realret = ret;
13550 if (TREE_CODE (ret) == NOP_EXPR)
13551 realret = TREE_OPERAND (ret, 0);
13552 if (CAN_HAVE_LOCATION_P (realret)
13553 && !EXPR_HAS_LOCATION (realret))
13554 SET_EXPR_LOCATION (realret, loc);
13564 /* Look up the function in built_in_decls that corresponds to DECL
13565 and set ASMSPEC as its user assembler name. DECL must be a
13566 function decl that declares a builtin. */
13569 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13572 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13573 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13576 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13577 set_user_assembler_name (builtin, asmspec);
13578 switch (DECL_FUNCTION_CODE (decl))
13580 case BUILT_IN_MEMCPY:
13581 init_block_move_fn (asmspec);
13582 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13584 case BUILT_IN_MEMSET:
13585 init_block_clear_fn (asmspec);
13586 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13588 case BUILT_IN_MEMMOVE:
13589 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13591 case BUILT_IN_MEMCMP:
13592 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13594 case BUILT_IN_ABORT:
13595 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);