1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
68 const char * built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_args_info (tree);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
228 /* Return true if NAME starts with __builtin_ or __sync_. */
231 is_builtin_name (const char *name)
233 if (strncmp (name, "__builtin_", 10) == 0)
235 if (strncmp (name, "__sync_", 7) == 0)
241 /* Return true if DECL is a function symbol representing a built-in. */
244 is_builtin_fn (tree decl)
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
255 called_as_built_in (tree node)
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
260 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
261 return is_builtin_name (name);
264 /* Return the alignment in bits of EXP, an object.
265 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
266 guessed alignment e.g. from type alignment. */
269 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
274 if (handled_component_p (exp))
276 HOST_WIDE_INT bitsize, bitpos;
278 enum machine_mode mode;
279 int unsignedp, volatilep;
281 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
282 &mode, &unsignedp, &volatilep, true);
284 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
289 if (TREE_CODE (offset) == PLUS_EXPR)
291 next_offset = TREE_OPERAND (offset, 0);
292 offset = TREE_OPERAND (offset, 1);
296 if (host_integerp (offset, 1))
298 /* Any overflow in calculating offset_bits won't change
301 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
304 inner = MIN (inner, (offset_bits & -offset_bits));
306 else if (TREE_CODE (offset) == MULT_EXPR
307 && host_integerp (TREE_OPERAND (offset, 1), 1))
309 /* Any overflow in calculating offset_factor won't change
311 unsigned offset_factor
312 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
316 inner = MIN (inner, (offset_factor & -offset_factor));
320 inner = MIN (inner, BITS_PER_UNIT);
323 offset = next_offset;
326 if (TREE_CODE (exp) == CONST_DECL)
327 exp = DECL_INITIAL (exp);
329 && TREE_CODE (exp) != LABEL_DECL)
330 align = MIN (inner, DECL_ALIGN (exp));
331 #ifdef CONSTANT_ALIGNMENT
332 else if (CONSTANT_CLASS_P (exp))
333 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
335 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
336 || TREE_CODE (exp) == INDIRECT_REF)
337 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
339 align = MIN (align, inner);
340 return MIN (align, max_align);
343 /* Returns true iff we can trust that alignment information has been
344 calculated properly. */
347 can_trust_pointer_alignment (void)
349 /* We rely on TER to compute accurate alignment information. */
350 return (optimize && flag_tree_ter);
353 /* Return the alignment in bits of EXP, a pointer valued expression.
354 But don't return more than MAX_ALIGN no matter what.
355 The alignment returned is, by default, the alignment of the thing that
356 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
358 Otherwise, look at the expression to see if we can do better, i.e., if the
359 expression is actually pointing at an object whose alignment is tighter. */
362 get_pointer_alignment (tree exp, unsigned int max_align)
364 unsigned int align, inner;
366 if (!can_trust_pointer_alignment ())
369 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
372 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
373 align = MIN (align, max_align);
377 switch (TREE_CODE (exp))
380 exp = TREE_OPERAND (exp, 0);
381 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
384 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
385 align = MIN (inner, max_align);
388 case POINTER_PLUS_EXPR:
389 /* If sum of pointer + int, restrict our maximum alignment to that
390 imposed by the integer. If not, we can't do any better than
392 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
395 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
396 & (max_align / BITS_PER_UNIT - 1))
400 exp = TREE_OPERAND (exp, 0);
404 /* See what we are pointing at and look at its alignment. */
405 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
413 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
414 way, because it could contain a zero byte in the middle.
415 TREE_STRING_LENGTH is the size of the character array, not the string.
417 ONLY_VALUE should be nonzero if the result is not going to be emitted
418 into the instruction stream and zero if it is going to be expanded.
419 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
420 is returned, otherwise NULL, since
421 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
422 evaluate the side-effects.
424 The value returned is of type `ssizetype'.
426 Unfortunately, string_constant can't access the values of const char
427 arrays with initializers, so neither can we do so here. */
430 c_strlen (tree src, int only_value)
433 HOST_WIDE_INT offset;
439 if (TREE_CODE (src) == COND_EXPR
440 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
444 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
445 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
446 if (tree_int_cst_equal (len1, len2))
450 if (TREE_CODE (src) == COMPOUND_EXPR
451 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
452 return c_strlen (TREE_OPERAND (src, 1), only_value);
454 if (EXPR_HAS_LOCATION (src))
455 loc = EXPR_LOCATION (src);
457 loc = input_location;
459 src = string_constant (src, &offset_node);
463 max = TREE_STRING_LENGTH (src) - 1;
464 ptr = TREE_STRING_POINTER (src);
466 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
468 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
469 compute the offset to the following null if we don't know where to
470 start searching for it. */
473 for (i = 0; i < max; i++)
477 /* We don't know the starting offset, but we do know that the string
478 has no internal zero bytes. We can assume that the offset falls
479 within the bounds of the string; otherwise, the programmer deserves
480 what he gets. Subtract the offset from the length of the string,
481 and return that. This would perhaps not be valid if we were dealing
482 with named arrays in addition to literal string constants. */
484 return size_diffop_loc (loc, size_int (max), offset_node);
487 /* We have a known offset into the string. Start searching there for
488 a null character if we can represent it as a single HOST_WIDE_INT. */
489 if (offset_node == 0)
491 else if (! host_integerp (offset_node, 0))
494 offset = tree_low_cst (offset_node, 0);
496 /* If the offset is known to be out of bounds, warn, and call strlen at
498 if (offset < 0 || offset > max)
500 /* Suppress multiple warnings for propagated constant strings. */
501 if (! TREE_NO_WARNING (src))
503 warning_at (loc, 0, "offset outside bounds of constant string");
504 TREE_NO_WARNING (src) = 1;
509 /* Use strlen to search for the first zero byte. Since any strings
510 constructed with build_string will have nulls appended, we win even
511 if we get handed something like (char[4])"abcd".
513 Since OFFSET is our starting index into the string, no further
514 calculation is needed. */
515 return ssize_int (strlen (ptr + offset));
518 /* Return a char pointer for a C string if it is a string constant
519 or sum of string constant and integer constant. */
526 src = string_constant (src, &offset_node);
530 if (offset_node == 0)
531 return TREE_STRING_POINTER (src);
532 else if (!host_integerp (offset_node, 1)
533 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
536 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
539 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
540 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
543 c_readstr (const char *str, enum machine_mode mode)
549 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
554 for (i = 0; i < GET_MODE_SIZE (mode); i++)
557 if (WORDS_BIG_ENDIAN)
558 j = GET_MODE_SIZE (mode) - i - 1;
559 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
560 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
561 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
563 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
566 ch = (unsigned char) str[i];
567 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
569 return immed_double_const (c[0], c[1], mode);
572 /* Cast a target constant CST to target CHAR and if that value fits into
573 host char type, return zero and put that value into variable pointed to by
577 target_char_cast (tree cst, char *p)
579 unsigned HOST_WIDE_INT val, hostval;
581 if (!host_integerp (cst, 1)
582 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
585 val = tree_low_cst (cst, 1);
586 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
587 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
590 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
591 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
600 /* Similar to save_expr, but assumes that arbitrary code is not executed
601 in between the multiple evaluations. In particular, we assume that a
602 non-addressable local variable will not be modified. */
605 builtin_save_expr (tree exp)
607 if (TREE_ADDRESSABLE (exp) == 0
608 && (TREE_CODE (exp) == PARM_DECL
609 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
612 return save_expr (exp);
615 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
616 times to get the address of either a higher stack frame, or a return
617 address located within it (depending on FNDECL_CODE). */
620 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
624 #ifdef INITIAL_FRAME_ADDRESS_RTX
625 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
629 /* For a zero count with __builtin_return_address, we don't care what
630 frame address we return, because target-specific definitions will
631 override us. Therefore frame pointer elimination is OK, and using
632 the soft frame pointer is OK.
634 For a nonzero count, or a zero count with __builtin_frame_address,
635 we require a stable offset from the current frame pointer to the
636 previous one, so we must use the hard frame pointer, and
637 we must disable frame pointer elimination. */
638 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
639 tem = frame_pointer_rtx;
642 tem = hard_frame_pointer_rtx;
644 /* Tell reload not to eliminate the frame pointer. */
645 crtl->accesses_prior_frames = 1;
649 /* Some machines need special handling before we can access
650 arbitrary frames. For example, on the SPARC, we must first flush
651 all register windows to the stack. */
652 #ifdef SETUP_FRAME_ADDRESSES
654 SETUP_FRAME_ADDRESSES ();
657 /* On the SPARC, the return address is not in the frame, it is in a
658 register. There is no way to access it off of the current frame
659 pointer, but it can be accessed off the previous frame pointer by
660 reading the value from the register window save area. */
661 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
662 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
666 /* Scan back COUNT frames to the specified frame. */
667 for (i = 0; i < count; i++)
669 /* Assume the dynamic chain pointer is in the word that the
670 frame address points to, unless otherwise specified. */
671 #ifdef DYNAMIC_CHAIN_ADDRESS
672 tem = DYNAMIC_CHAIN_ADDRESS (tem);
674 tem = memory_address (Pmode, tem);
675 tem = gen_frame_mem (Pmode, tem);
676 tem = copy_to_reg (tem);
679 /* For __builtin_frame_address, return what we've got. But, on
680 the SPARC for example, we may have to add a bias. */
681 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
682 #ifdef FRAME_ADDR_RTX
683 return FRAME_ADDR_RTX (tem);
688 /* For __builtin_return_address, get the return address from that frame. */
689 #ifdef RETURN_ADDR_RTX
690 tem = RETURN_ADDR_RTX (count, tem);
692 tem = memory_address (Pmode,
693 plus_constant (tem, GET_MODE_SIZE (Pmode)));
694 tem = gen_frame_mem (Pmode, tem);
699 /* Alias set used for setjmp buffer. */
700 static alias_set_type setjmp_alias_set = -1;
702 /* Construct the leading half of a __builtin_setjmp call. Control will
703 return to RECEIVER_LABEL. This is also called directly by the SJLJ
704 exception handling code. */
707 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
709 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
713 if (setjmp_alias_set == -1)
714 setjmp_alias_set = new_alias_set ();
716 buf_addr = convert_memory_address (Pmode, buf_addr);
718 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
720 /* We store the frame pointer and the address of receiver_label in
721 the buffer and use the rest of it for the stack save area, which
722 is machine-dependent. */
724 mem = gen_rtx_MEM (Pmode, buf_addr);
725 set_mem_alias_set (mem, setjmp_alias_set);
726 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
728 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
729 set_mem_alias_set (mem, setjmp_alias_set);
731 emit_move_insn (validize_mem (mem),
732 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
734 stack_save = gen_rtx_MEM (sa_mode,
735 plus_constant (buf_addr,
736 2 * GET_MODE_SIZE (Pmode)));
737 set_mem_alias_set (stack_save, setjmp_alias_set);
738 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
740 /* If there is further processing to do, do it. */
741 #ifdef HAVE_builtin_setjmp_setup
742 if (HAVE_builtin_setjmp_setup)
743 emit_insn (gen_builtin_setjmp_setup (buf_addr));
746 /* Tell optimize_save_area_alloca that extra work is going to
747 need to go on during alloca. */
748 cfun->calls_setjmp = 1;
750 /* We have a nonlocal label. */
751 cfun->has_nonlocal_label = 1;
754 /* Construct the trailing part of a __builtin_setjmp call. This is
755 also called directly by the SJLJ exception handling code. */
758 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
762 /* Clobber the FP when we get here, so we have to make sure it's
763 marked as used by this function. */
764 emit_use (hard_frame_pointer_rtx);
766 /* Mark the static chain as clobbered here so life information
767 doesn't get messed up for it. */
768 chain = targetm.calls.static_chain (current_function_decl, true);
769 if (chain && REG_P (chain))
770 emit_clobber (chain);
772 /* Now put in the code to restore the frame pointer, and argument
773 pointer, if needed. */
774 #ifdef HAVE_nonlocal_goto
775 if (! HAVE_nonlocal_goto)
778 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
779 /* This might change the hard frame pointer in ways that aren't
780 apparent to early optimization passes, so force a clobber. */
781 emit_clobber (hard_frame_pointer_rtx);
784 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
785 if (fixed_regs[ARG_POINTER_REGNUM])
787 #ifdef ELIMINABLE_REGS
789 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
791 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
792 if (elim_regs[i].from == ARG_POINTER_REGNUM
793 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
796 if (i == ARRAY_SIZE (elim_regs))
799 /* Now restore our arg pointer from the address at which it
800 was saved in our stack frame. */
801 emit_move_insn (crtl->args.internal_arg_pointer,
802 copy_to_reg (get_arg_pointer_save_area ()));
807 #ifdef HAVE_builtin_setjmp_receiver
808 if (HAVE_builtin_setjmp_receiver)
809 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
812 #ifdef HAVE_nonlocal_goto_receiver
813 if (HAVE_nonlocal_goto_receiver)
814 emit_insn (gen_nonlocal_goto_receiver ());
819 /* We must not allow the code we just generated to be reordered by
820 scheduling. Specifically, the update of the frame pointer must
821 happen immediately, not later. */
822 emit_insn (gen_blockage ());
825 /* __builtin_longjmp is passed a pointer to an array of five words (not
826 all will be used on all machines). It operates similarly to the C
827 library function of the same name, but is more efficient. Much of
828 the code below is copied from the handling of non-local gotos. */
831 expand_builtin_longjmp (rtx buf_addr, rtx value)
833 rtx fp, lab, stack, insn, last;
834 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
836 /* DRAP is needed for stack realign if longjmp is expanded to current
838 if (SUPPORTS_STACK_ALIGNMENT)
839 crtl->need_drap = true;
841 if (setjmp_alias_set == -1)
842 setjmp_alias_set = new_alias_set ();
844 buf_addr = convert_memory_address (Pmode, buf_addr);
846 buf_addr = force_reg (Pmode, buf_addr);
848 /* We require that the user must pass a second argument of 1, because
849 that is what builtin_setjmp will return. */
850 gcc_assert (value == const1_rtx);
852 last = get_last_insn ();
853 #ifdef HAVE_builtin_longjmp
854 if (HAVE_builtin_longjmp)
855 emit_insn (gen_builtin_longjmp (buf_addr));
859 fp = gen_rtx_MEM (Pmode, buf_addr);
860 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
861 GET_MODE_SIZE (Pmode)));
863 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
864 2 * GET_MODE_SIZE (Pmode)));
865 set_mem_alias_set (fp, setjmp_alias_set);
866 set_mem_alias_set (lab, setjmp_alias_set);
867 set_mem_alias_set (stack, setjmp_alias_set);
869 /* Pick up FP, label, and SP from the block and jump. This code is
870 from expand_goto in stmt.c; see there for detailed comments. */
871 #ifdef HAVE_nonlocal_goto
872 if (HAVE_nonlocal_goto)
873 /* We have to pass a value to the nonlocal_goto pattern that will
874 get copied into the static_chain pointer, but it does not matter
875 what that value is, because builtin_setjmp does not use it. */
876 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
880 lab = copy_to_reg (lab);
882 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
883 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
885 emit_move_insn (hard_frame_pointer_rtx, fp);
886 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
888 emit_use (hard_frame_pointer_rtx);
889 emit_use (stack_pointer_rtx);
890 emit_indirect_jump (lab);
894 /* Search backwards and mark the jump insn as a non-local goto.
895 Note that this precludes the use of __builtin_longjmp to a
896 __builtin_setjmp target in the same function. However, we've
897 already cautioned the user that these functions are for
898 internal exception handling use only. */
899 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
901 gcc_assert (insn != last);
905 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
908 else if (CALL_P (insn))
913 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
914 and the address of the save area. */
917 expand_builtin_nonlocal_goto (tree exp)
919 tree t_label, t_save_area;
920 rtx r_label, r_save_area, r_fp, r_sp, insn;
922 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
925 t_label = CALL_EXPR_ARG (exp, 0);
926 t_save_area = CALL_EXPR_ARG (exp, 1);
928 r_label = expand_normal (t_label);
929 r_label = convert_memory_address (Pmode, r_label);
930 r_save_area = expand_normal (t_save_area);
931 r_save_area = convert_memory_address (Pmode, r_save_area);
932 /* Copy the address of the save location to a register just in case it was based
933 on the frame pointer. */
934 r_save_area = copy_to_reg (r_save_area);
935 r_fp = gen_rtx_MEM (Pmode, r_save_area);
936 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
937 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
939 crtl->has_nonlocal_goto = 1;
941 #ifdef HAVE_nonlocal_goto
942 /* ??? We no longer need to pass the static chain value, afaik. */
943 if (HAVE_nonlocal_goto)
944 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
948 r_label = copy_to_reg (r_label);
950 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
951 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
953 /* Restore frame pointer for containing function.
954 This sets the actual hard register used for the frame pointer
955 to the location of the function's incoming static chain info.
956 The non-local goto handler will then adjust it to contain the
957 proper value and reload the argument pointer, if needed. */
958 emit_move_insn (hard_frame_pointer_rtx, r_fp);
959 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
961 /* USE of hard_frame_pointer_rtx added for consistency;
962 not clear if really needed. */
963 emit_use (hard_frame_pointer_rtx);
964 emit_use (stack_pointer_rtx);
966 /* If the architecture is using a GP register, we must
967 conservatively assume that the target function makes use of it.
968 The prologue of functions with nonlocal gotos must therefore
969 initialize the GP register to the appropriate value, and we
970 must then make sure that this value is live at the point
971 of the jump. (Note that this doesn't necessarily apply
972 to targets with a nonlocal_goto pattern; they are free
973 to implement it in their own way. Note also that this is
974 a no-op if the GP register is a global invariant.) */
975 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
976 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
977 emit_use (pic_offset_table_rtx);
979 emit_indirect_jump (r_label);
982 /* Search backwards to the jump insn and mark it as a
984 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
988 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
991 else if (CALL_P (insn))
998 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
999 (not all will be used on all machines) that was passed to __builtin_setjmp.
1000 It updates the stack pointer in that block to correspond to the current
1004 expand_builtin_update_setjmp_buf (rtx buf_addr)
1006 enum machine_mode sa_mode = Pmode;
1010 #ifdef HAVE_save_stack_nonlocal
1011 if (HAVE_save_stack_nonlocal)
1012 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1014 #ifdef STACK_SAVEAREA_MODE
1015 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1019 = gen_rtx_MEM (sa_mode,
1022 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1026 emit_insn (gen_setjmp ());
1029 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1032 /* Expand a call to __builtin_prefetch. For a target that does not support
1033 data prefetch, evaluate the memory address argument in case it has side
1037 expand_builtin_prefetch (tree exp)
1039 tree arg0, arg1, arg2;
1043 if (!validate_arglist (exp, POINTER_TYPE, 0))
1046 arg0 = CALL_EXPR_ARG (exp, 0);
1048 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1049 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1051 nargs = call_expr_nargs (exp);
1053 arg1 = CALL_EXPR_ARG (exp, 1);
1055 arg1 = integer_zero_node;
1057 arg2 = CALL_EXPR_ARG (exp, 2);
1059 arg2 = build_int_cst (NULL_TREE, 3);
1061 /* Argument 0 is an address. */
1062 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1064 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1065 if (TREE_CODE (arg1) != INTEGER_CST)
1067 error ("second argument to %<__builtin_prefetch%> must be a constant");
1068 arg1 = integer_zero_node;
1070 op1 = expand_normal (arg1);
1071 /* Argument 1 must be either zero or one. */
1072 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1074 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1079 /* Argument 2 (locality) must be a compile-time constant int. */
1080 if (TREE_CODE (arg2) != INTEGER_CST)
1082 error ("third argument to %<__builtin_prefetch%> must be a constant");
1083 arg2 = integer_zero_node;
1085 op2 = expand_normal (arg2);
1086 /* Argument 2 must be 0, 1, 2, or 3. */
1087 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1089 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1093 #ifdef HAVE_prefetch
1096 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1098 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1099 || (GET_MODE (op0) != Pmode))
1101 op0 = convert_memory_address (Pmode, op0);
1102 op0 = force_reg (Pmode, op0);
1104 emit_insn (gen_prefetch (op0, op1, op2));
1108 /* Don't do anything with direct references to volatile memory, but
1109 generate code to handle other side effects. */
1110 if (!MEM_P (op0) && side_effects_p (op0))
1114 /* Get a MEM rtx for expression EXP which is the address of an operand
1115 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1116 the maximum length of the block of memory that might be accessed or
1120 get_memory_rtx (tree exp, tree len)
1122 tree orig_exp = exp;
1126 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1127 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1128 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1129 exp = TREE_OPERAND (exp, 0);
1131 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1132 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1134 /* Get an expression we can use to find the attributes to assign to MEM.
1135 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1136 we can. First remove any nops. */
1137 while (CONVERT_EXPR_P (exp)
1138 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1139 exp = TREE_OPERAND (exp, 0);
1142 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1143 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1144 && host_integerp (TREE_OPERAND (exp, 1), 0)
1145 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1146 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1147 else if (TREE_CODE (exp) == ADDR_EXPR)
1148 exp = TREE_OPERAND (exp, 0);
1149 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1150 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1154 /* Honor attributes derived from exp, except for the alias set
1155 (as builtin stringops may alias with anything) and the size
1156 (as stringops may access multiple array elements). */
1159 set_mem_attributes (mem, exp, 0);
1162 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1164 /* Allow the string and memory builtins to overflow from one
1165 field into another, see http://gcc.gnu.org/PR23561.
1166 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1167 memory accessed by the string or memory builtin will fit
1168 within the field. */
1169 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1171 tree mem_expr = MEM_EXPR (mem);
1172 HOST_WIDE_INT offset = -1, length = -1;
1175 while (TREE_CODE (inner) == ARRAY_REF
1176 || CONVERT_EXPR_P (inner)
1177 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1178 || TREE_CODE (inner) == SAVE_EXPR)
1179 inner = TREE_OPERAND (inner, 0);
1181 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1183 if (MEM_OFFSET (mem)
1184 && CONST_INT_P (MEM_OFFSET (mem)))
1185 offset = INTVAL (MEM_OFFSET (mem));
1187 if (offset >= 0 && len && host_integerp (len, 0))
1188 length = tree_low_cst (len, 0);
1190 while (TREE_CODE (inner) == COMPONENT_REF)
1192 tree field = TREE_OPERAND (inner, 1);
1193 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1194 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1196 /* Bitfields are generally not byte-addressable. */
1197 gcc_assert (!DECL_BIT_FIELD (field)
1198 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1199 % BITS_PER_UNIT) == 0
1200 && host_integerp (DECL_SIZE (field), 0)
1201 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1202 % BITS_PER_UNIT) == 0));
1204 /* If we can prove that the memory starting at XEXP (mem, 0) and
1205 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1206 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1207 fields without DECL_SIZE_UNIT like flexible array members. */
1209 && DECL_SIZE_UNIT (field)
1210 && host_integerp (DECL_SIZE_UNIT (field), 0))
1213 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1216 && offset + length <= size)
1221 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1222 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1223 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1231 mem_expr = TREE_OPERAND (mem_expr, 0);
1232 inner = TREE_OPERAND (inner, 0);
1235 if (mem_expr == NULL)
1237 if (mem_expr != MEM_EXPR (mem))
1239 set_mem_expr (mem, mem_expr);
1240 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1243 set_mem_alias_set (mem, 0);
1244 set_mem_size (mem, NULL_RTX);
1250 /* Built-in functions to perform an untyped call and return. */
1252 /* For each register that may be used for calling a function, this
1253 gives a mode used to copy the register's value. VOIDmode indicates
1254 the register is not used for calling a function. If the machine
1255 has register windows, this gives only the outbound registers.
1256 INCOMING_REGNO gives the corresponding inbound register. */
1257 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1259 /* For each register that may be used for returning values, this gives
1260 a mode used to copy the register's value. VOIDmode indicates the
1261 register is not used for returning values. If the machine has
1262 register windows, this gives only the outbound registers.
1263 INCOMING_REGNO gives the corresponding inbound register. */
1264 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1266 /* Return the size required for the block returned by __builtin_apply_args,
1267 and initialize apply_args_mode. */
1270 apply_args_size (void)
1272 static int size = -1;
1275 enum machine_mode mode;
1277 /* The values computed by this function never change. */
1280 /* The first value is the incoming arg-pointer. */
1281 size = GET_MODE_SIZE (Pmode);
1283 /* The second value is the structure value address unless this is
1284 passed as an "invisible" first argument. */
1285 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1286 size += GET_MODE_SIZE (Pmode);
1288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1289 if (FUNCTION_ARG_REGNO_P (regno))
1291 mode = reg_raw_mode[regno];
1293 gcc_assert (mode != VOIDmode);
1295 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1296 if (size % align != 0)
1297 size = CEIL (size, align) * align;
1298 size += GET_MODE_SIZE (mode);
1299 apply_args_mode[regno] = mode;
1303 apply_args_mode[regno] = VOIDmode;
1309 /* Return the size required for the block returned by __builtin_apply,
1310 and initialize apply_result_mode. */
1313 apply_result_size (void)
1315 static int size = -1;
1317 enum machine_mode mode;
1319 /* The values computed by this function never change. */
1324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325 if (FUNCTION_VALUE_REGNO_P (regno))
1327 mode = reg_raw_mode[regno];
1329 gcc_assert (mode != VOIDmode);
1331 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1332 if (size % align != 0)
1333 size = CEIL (size, align) * align;
1334 size += GET_MODE_SIZE (mode);
1335 apply_result_mode[regno] = mode;
1338 apply_result_mode[regno] = VOIDmode;
1340 /* Allow targets that use untyped_call and untyped_return to override
1341 the size so that machine-specific information can be stored here. */
1342 #ifdef APPLY_RESULT_SIZE
1343 size = APPLY_RESULT_SIZE;
1349 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1350 /* Create a vector describing the result block RESULT. If SAVEP is true,
1351 the result block is used to save the values; otherwise it is used to
1352 restore the values. */
1355 result_vector (int savep, rtx result)
1357 int regno, size, align, nelts;
1358 enum machine_mode mode;
1360 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1363 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1364 if ((mode = apply_result_mode[regno]) != VOIDmode)
1366 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1367 if (size % align != 0)
1368 size = CEIL (size, align) * align;
1369 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1370 mem = adjust_address (result, mode, size);
1371 savevec[nelts++] = (savep
1372 ? gen_rtx_SET (VOIDmode, mem, reg)
1373 : gen_rtx_SET (VOIDmode, reg, mem));
1374 size += GET_MODE_SIZE (mode);
1376 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1378 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1380 /* Save the state required to perform an untyped call with the same
1381 arguments as were passed to the current function. */
1384 expand_builtin_apply_args_1 (void)
1387 int size, align, regno;
1388 enum machine_mode mode;
1389 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1391 /* Create a block where the arg-pointer, structure value address,
1392 and argument registers can be saved. */
1393 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1395 /* Walk past the arg-pointer and structure value address. */
1396 size = GET_MODE_SIZE (Pmode);
1397 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1398 size += GET_MODE_SIZE (Pmode);
1400 /* Save each register used in calling a function to the block. */
1401 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1402 if ((mode = apply_args_mode[regno]) != VOIDmode)
1404 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1405 if (size % align != 0)
1406 size = CEIL (size, align) * align;
1408 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1410 emit_move_insn (adjust_address (registers, mode, size), tem);
1411 size += GET_MODE_SIZE (mode);
1414 /* Save the arg pointer to the block. */
1415 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1416 #ifdef STACK_GROWS_DOWNWARD
1417 /* We need the pointer as the caller actually passed them to us, not
1418 as we might have pretended they were passed. Make sure it's a valid
1419 operand, as emit_move_insn isn't expected to handle a PLUS. */
1421 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1424 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1426 size = GET_MODE_SIZE (Pmode);
1428 /* Save the structure value address unless this is passed as an
1429 "invisible" first argument. */
1430 if (struct_incoming_value)
1432 emit_move_insn (adjust_address (registers, Pmode, size),
1433 copy_to_reg (struct_incoming_value));
1434 size += GET_MODE_SIZE (Pmode);
1437 /* Return the address of the block. */
1438 return copy_addr_to_reg (XEXP (registers, 0));
1441 /* __builtin_apply_args returns block of memory allocated on
1442 the stack into which is stored the arg pointer, structure
1443 value address, static chain, and all the registers that might
1444 possibly be used in performing a function call. The code is
1445 moved to the start of the function so the incoming values are
1449 expand_builtin_apply_args (void)
1451 /* Don't do __builtin_apply_args more than once in a function.
1452 Save the result of the first call and reuse it. */
1453 if (apply_args_value != 0)
1454 return apply_args_value;
1456 /* When this function is called, it means that registers must be
1457 saved on entry to this function. So we migrate the
1458 call to the first insn of this function. */
1463 temp = expand_builtin_apply_args_1 ();
1467 apply_args_value = temp;
1469 /* Put the insns after the NOTE that starts the function.
1470 If this is inside a start_sequence, make the outer-level insn
1471 chain current, so the code is placed at the start of the
1472 function. If internal_arg_pointer is a non-virtual pseudo,
1473 it needs to be placed after the function that initializes
1475 push_topmost_sequence ();
1476 if (REG_P (crtl->args.internal_arg_pointer)
1477 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1478 emit_insn_before (seq, parm_birth_insn);
1480 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1481 pop_topmost_sequence ();
1486 /* Perform an untyped call and save the state required to perform an
1487 untyped return of whatever value was returned by the given function. */
1490 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1492 int size, align, regno;
1493 enum machine_mode mode;
1494 rtx incoming_args, result, reg, dest, src, call_insn;
1495 rtx old_stack_level = 0;
1496 rtx call_fusage = 0;
1497 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1499 arguments = convert_memory_address (Pmode, arguments);
1501 /* Create a block where the return registers can be saved. */
1502 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1504 /* Fetch the arg pointer from the ARGUMENTS block. */
1505 incoming_args = gen_reg_rtx (Pmode);
1506 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1507 #ifndef STACK_GROWS_DOWNWARD
1508 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1509 incoming_args, 0, OPTAB_LIB_WIDEN);
1512 /* Push a new argument block and copy the arguments. Do not allow
1513 the (potential) memcpy call below to interfere with our stack
1515 do_pending_stack_adjust ();
1518 /* Save the stack with nonlocal if available. */
1519 #ifdef HAVE_save_stack_nonlocal
1520 if (HAVE_save_stack_nonlocal)
1521 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1524 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1526 /* Allocate a block of memory onto the stack and copy the memory
1527 arguments to the outgoing arguments address. */
1528 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1530 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1531 may have already set current_function_calls_alloca to true.
1532 current_function_calls_alloca won't be set if argsize is zero,
1533 so we have to guarantee need_drap is true here. */
1534 if (SUPPORTS_STACK_ALIGNMENT)
1535 crtl->need_drap = true;
1537 dest = virtual_outgoing_args_rtx;
1538 #ifndef STACK_GROWS_DOWNWARD
1539 if (CONST_INT_P (argsize))
1540 dest = plus_constant (dest, -INTVAL (argsize));
1542 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1544 dest = gen_rtx_MEM (BLKmode, dest);
1545 set_mem_align (dest, PARM_BOUNDARY);
1546 src = gen_rtx_MEM (BLKmode, incoming_args);
1547 set_mem_align (src, PARM_BOUNDARY);
1548 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1550 /* Refer to the argument block. */
1552 arguments = gen_rtx_MEM (BLKmode, arguments);
1553 set_mem_align (arguments, PARM_BOUNDARY);
1555 /* Walk past the arg-pointer and structure value address. */
1556 size = GET_MODE_SIZE (Pmode);
1558 size += GET_MODE_SIZE (Pmode);
1560 /* Restore each of the registers previously saved. Make USE insns
1561 for each of these registers for use in making the call. */
1562 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1563 if ((mode = apply_args_mode[regno]) != VOIDmode)
1565 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1566 if (size % align != 0)
1567 size = CEIL (size, align) * align;
1568 reg = gen_rtx_REG (mode, regno);
1569 emit_move_insn (reg, adjust_address (arguments, mode, size));
1570 use_reg (&call_fusage, reg);
1571 size += GET_MODE_SIZE (mode);
1574 /* Restore the structure value address unless this is passed as an
1575 "invisible" first argument. */
1576 size = GET_MODE_SIZE (Pmode);
1579 rtx value = gen_reg_rtx (Pmode);
1580 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1581 emit_move_insn (struct_value, value);
1582 if (REG_P (struct_value))
1583 use_reg (&call_fusage, struct_value);
1584 size += GET_MODE_SIZE (Pmode);
1587 /* All arguments and registers used for the call are set up by now! */
1588 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1590 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1591 and we don't want to load it into a register as an optimization,
1592 because prepare_call_address already did it if it should be done. */
1593 if (GET_CODE (function) != SYMBOL_REF)
1594 function = memory_address (FUNCTION_MODE, function);
1596 /* Generate the actual call instruction and save the return value. */
1597 #ifdef HAVE_untyped_call
1598 if (HAVE_untyped_call)
1599 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1600 result, result_vector (1, result)));
1603 #ifdef HAVE_call_value
1604 if (HAVE_call_value)
1608 /* Locate the unique return register. It is not possible to
1609 express a call that sets more than one return register using
1610 call_value; use untyped_call for that. In fact, untyped_call
1611 only needs to save the return registers in the given block. */
1612 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1613 if ((mode = apply_result_mode[regno]) != VOIDmode)
1615 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1617 valreg = gen_rtx_REG (mode, regno);
1620 emit_call_insn (GEN_CALL_VALUE (valreg,
1621 gen_rtx_MEM (FUNCTION_MODE, function),
1622 const0_rtx, NULL_RTX, const0_rtx));
1624 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1630 /* Find the CALL insn we just emitted, and attach the register usage
1632 call_insn = last_call_insn ();
1633 add_function_usage_to (call_insn, call_fusage);
1635 /* Restore the stack. */
1636 #ifdef HAVE_save_stack_nonlocal
1637 if (HAVE_save_stack_nonlocal)
1638 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1641 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1645 /* Return the address of the result block. */
1646 result = copy_addr_to_reg (XEXP (result, 0));
1647 return convert_memory_address (ptr_mode, result);
1650 /* Perform an untyped return. */
1653 expand_builtin_return (rtx result)
1655 int size, align, regno;
1656 enum machine_mode mode;
1658 rtx call_fusage = 0;
1660 result = convert_memory_address (Pmode, result);
1662 apply_result_size ();
1663 result = gen_rtx_MEM (BLKmode, result);
1665 #ifdef HAVE_untyped_return
1666 if (HAVE_untyped_return)
1668 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1674 /* Restore the return value and note that each value is used. */
1676 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1677 if ((mode = apply_result_mode[regno]) != VOIDmode)
1679 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1680 if (size % align != 0)
1681 size = CEIL (size, align) * align;
1682 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1683 emit_move_insn (reg, adjust_address (result, mode, size));
1685 push_to_sequence (call_fusage);
1687 call_fusage = get_insns ();
1689 size += GET_MODE_SIZE (mode);
1692 /* Put the USE insns before the return. */
1693 emit_insn (call_fusage);
1695 /* Return whatever values was restored by jumping directly to the end
1697 expand_naked_return ();
1700 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1702 static enum type_class
1703 type_to_class (tree type)
1705 switch (TREE_CODE (type))
1707 case VOID_TYPE: return void_type_class;
1708 case INTEGER_TYPE: return integer_type_class;
1709 case ENUMERAL_TYPE: return enumeral_type_class;
1710 case BOOLEAN_TYPE: return boolean_type_class;
1711 case POINTER_TYPE: return pointer_type_class;
1712 case REFERENCE_TYPE: return reference_type_class;
1713 case OFFSET_TYPE: return offset_type_class;
1714 case REAL_TYPE: return real_type_class;
1715 case COMPLEX_TYPE: return complex_type_class;
1716 case FUNCTION_TYPE: return function_type_class;
1717 case METHOD_TYPE: return method_type_class;
1718 case RECORD_TYPE: return record_type_class;
1720 case QUAL_UNION_TYPE: return union_type_class;
1721 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1722 ? string_type_class : array_type_class);
1723 case LANG_TYPE: return lang_type_class;
1724 default: return no_type_class;
1728 /* Expand a call EXP to __builtin_classify_type. */
1731 expand_builtin_classify_type (tree exp)
1733 if (call_expr_nargs (exp))
1734 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1735 return GEN_INT (no_type_class);
1738 /* This helper macro, meant to be used in mathfn_built_in below,
1739 determines which among a set of three builtin math functions is
1740 appropriate for a given type mode. The `F' and `L' cases are
1741 automatically generated from the `double' case. */
1742 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1743 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1744 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1745 fcodel = BUILT_IN_MATHFN##L ; break;
1746 /* Similar to above, but appends _R after any F/L suffix. */
1747 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1748 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1749 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1750 fcodel = BUILT_IN_MATHFN##L_R ; break;
1752 /* Return mathematic function equivalent to FN but operating directly
1753 on TYPE, if available. If IMPLICIT is true find the function in
1754 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1755 can't do the conversion, return zero. */
1758 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1760 tree const *const fn_arr
1761 = implicit ? implicit_built_in_decls : built_in_decls;
1762 enum built_in_function fcode, fcodef, fcodel;
1766 CASE_MATHFN (BUILT_IN_ACOS)
1767 CASE_MATHFN (BUILT_IN_ACOSH)
1768 CASE_MATHFN (BUILT_IN_ASIN)
1769 CASE_MATHFN (BUILT_IN_ASINH)
1770 CASE_MATHFN (BUILT_IN_ATAN)
1771 CASE_MATHFN (BUILT_IN_ATAN2)
1772 CASE_MATHFN (BUILT_IN_ATANH)
1773 CASE_MATHFN (BUILT_IN_CBRT)
1774 CASE_MATHFN (BUILT_IN_CEIL)
1775 CASE_MATHFN (BUILT_IN_CEXPI)
1776 CASE_MATHFN (BUILT_IN_COPYSIGN)
1777 CASE_MATHFN (BUILT_IN_COS)
1778 CASE_MATHFN (BUILT_IN_COSH)
1779 CASE_MATHFN (BUILT_IN_DREM)
1780 CASE_MATHFN (BUILT_IN_ERF)
1781 CASE_MATHFN (BUILT_IN_ERFC)
1782 CASE_MATHFN (BUILT_IN_EXP)
1783 CASE_MATHFN (BUILT_IN_EXP10)
1784 CASE_MATHFN (BUILT_IN_EXP2)
1785 CASE_MATHFN (BUILT_IN_EXPM1)
1786 CASE_MATHFN (BUILT_IN_FABS)
1787 CASE_MATHFN (BUILT_IN_FDIM)
1788 CASE_MATHFN (BUILT_IN_FLOOR)
1789 CASE_MATHFN (BUILT_IN_FMA)
1790 CASE_MATHFN (BUILT_IN_FMAX)
1791 CASE_MATHFN (BUILT_IN_FMIN)
1792 CASE_MATHFN (BUILT_IN_FMOD)
1793 CASE_MATHFN (BUILT_IN_FREXP)
1794 CASE_MATHFN (BUILT_IN_GAMMA)
1795 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1796 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1797 CASE_MATHFN (BUILT_IN_HYPOT)
1798 CASE_MATHFN (BUILT_IN_ILOGB)
1799 CASE_MATHFN (BUILT_IN_INF)
1800 CASE_MATHFN (BUILT_IN_ISINF)
1801 CASE_MATHFN (BUILT_IN_J0)
1802 CASE_MATHFN (BUILT_IN_J1)
1803 CASE_MATHFN (BUILT_IN_JN)
1804 CASE_MATHFN (BUILT_IN_LCEIL)
1805 CASE_MATHFN (BUILT_IN_LDEXP)
1806 CASE_MATHFN (BUILT_IN_LFLOOR)
1807 CASE_MATHFN (BUILT_IN_LGAMMA)
1808 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1809 CASE_MATHFN (BUILT_IN_LLCEIL)
1810 CASE_MATHFN (BUILT_IN_LLFLOOR)
1811 CASE_MATHFN (BUILT_IN_LLRINT)
1812 CASE_MATHFN (BUILT_IN_LLROUND)
1813 CASE_MATHFN (BUILT_IN_LOG)
1814 CASE_MATHFN (BUILT_IN_LOG10)
1815 CASE_MATHFN (BUILT_IN_LOG1P)
1816 CASE_MATHFN (BUILT_IN_LOG2)
1817 CASE_MATHFN (BUILT_IN_LOGB)
1818 CASE_MATHFN (BUILT_IN_LRINT)
1819 CASE_MATHFN (BUILT_IN_LROUND)
1820 CASE_MATHFN (BUILT_IN_MODF)
1821 CASE_MATHFN (BUILT_IN_NAN)
1822 CASE_MATHFN (BUILT_IN_NANS)
1823 CASE_MATHFN (BUILT_IN_NEARBYINT)
1824 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1825 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1826 CASE_MATHFN (BUILT_IN_POW)
1827 CASE_MATHFN (BUILT_IN_POWI)
1828 CASE_MATHFN (BUILT_IN_POW10)
1829 CASE_MATHFN (BUILT_IN_REMAINDER)
1830 CASE_MATHFN (BUILT_IN_REMQUO)
1831 CASE_MATHFN (BUILT_IN_RINT)
1832 CASE_MATHFN (BUILT_IN_ROUND)
1833 CASE_MATHFN (BUILT_IN_SCALB)
1834 CASE_MATHFN (BUILT_IN_SCALBLN)
1835 CASE_MATHFN (BUILT_IN_SCALBN)
1836 CASE_MATHFN (BUILT_IN_SIGNBIT)
1837 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1838 CASE_MATHFN (BUILT_IN_SIN)
1839 CASE_MATHFN (BUILT_IN_SINCOS)
1840 CASE_MATHFN (BUILT_IN_SINH)
1841 CASE_MATHFN (BUILT_IN_SQRT)
1842 CASE_MATHFN (BUILT_IN_TAN)
1843 CASE_MATHFN (BUILT_IN_TANH)
1844 CASE_MATHFN (BUILT_IN_TGAMMA)
1845 CASE_MATHFN (BUILT_IN_TRUNC)
1846 CASE_MATHFN (BUILT_IN_Y0)
1847 CASE_MATHFN (BUILT_IN_Y1)
1848 CASE_MATHFN (BUILT_IN_YN)
1854 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1855 return fn_arr[fcode];
1856 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1857 return fn_arr[fcodef];
1858 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1859 return fn_arr[fcodel];
1864 /* Like mathfn_built_in_1(), but always use the implicit array. */
1867 mathfn_built_in (tree type, enum built_in_function fn)
1869 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1872 /* If errno must be maintained, expand the RTL to check if the result,
1873 TARGET, of a built-in function call, EXP, is NaN, and if so set
1877 expand_errno_check (tree exp, rtx target)
1879 rtx lab = gen_label_rtx ();
1881 /* Test the result; if it is NaN, set errno=EDOM because
1882 the argument was not in the domain. */
1883 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1884 NULL_RTX, NULL_RTX, lab,
1885 /* The jump is very likely. */
1886 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1889 /* If this built-in doesn't throw an exception, set errno directly. */
1890 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1892 #ifdef GEN_ERRNO_RTX
1893 rtx errno_rtx = GEN_ERRNO_RTX;
1896 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1898 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1904 /* Make sure the library call isn't expanded as a tail call. */
1905 CALL_EXPR_TAILCALL (exp) = 0;
1907 /* We can't set errno=EDOM directly; let the library call do it.
1908 Pop the arguments right away in case the call gets deleted. */
1910 expand_call (exp, target, 0);
1915 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1916 Return NULL_RTX if a normal call should be emitted rather than expanding
1917 the function in-line. EXP is the expression that is a call to the builtin
1918 function; if convenient, the result should be placed in TARGET.
1919 SUBTARGET may be used as the target for computing one of EXP's operands. */
1922 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1924 optab builtin_optab;
1926 tree fndecl = get_callee_fndecl (exp);
1927 enum machine_mode mode;
1928 bool errno_set = false;
1931 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1934 arg = CALL_EXPR_ARG (exp, 0);
1936 switch (DECL_FUNCTION_CODE (fndecl))
1938 CASE_FLT_FN (BUILT_IN_SQRT):
1939 errno_set = ! tree_expr_nonnegative_p (arg);
1940 builtin_optab = sqrt_optab;
1942 CASE_FLT_FN (BUILT_IN_EXP):
1943 errno_set = true; builtin_optab = exp_optab; break;
1944 CASE_FLT_FN (BUILT_IN_EXP10):
1945 CASE_FLT_FN (BUILT_IN_POW10):
1946 errno_set = true; builtin_optab = exp10_optab; break;
1947 CASE_FLT_FN (BUILT_IN_EXP2):
1948 errno_set = true; builtin_optab = exp2_optab; break;
1949 CASE_FLT_FN (BUILT_IN_EXPM1):
1950 errno_set = true; builtin_optab = expm1_optab; break;
1951 CASE_FLT_FN (BUILT_IN_LOGB):
1952 errno_set = true; builtin_optab = logb_optab; break;
1953 CASE_FLT_FN (BUILT_IN_LOG):
1954 errno_set = true; builtin_optab = log_optab; break;
1955 CASE_FLT_FN (BUILT_IN_LOG10):
1956 errno_set = true; builtin_optab = log10_optab; break;
1957 CASE_FLT_FN (BUILT_IN_LOG2):
1958 errno_set = true; builtin_optab = log2_optab; break;
1959 CASE_FLT_FN (BUILT_IN_LOG1P):
1960 errno_set = true; builtin_optab = log1p_optab; break;
1961 CASE_FLT_FN (BUILT_IN_ASIN):
1962 builtin_optab = asin_optab; break;
1963 CASE_FLT_FN (BUILT_IN_ACOS):
1964 builtin_optab = acos_optab; break;
1965 CASE_FLT_FN (BUILT_IN_TAN):
1966 builtin_optab = tan_optab; break;
1967 CASE_FLT_FN (BUILT_IN_ATAN):
1968 builtin_optab = atan_optab; break;
1969 CASE_FLT_FN (BUILT_IN_FLOOR):
1970 builtin_optab = floor_optab; break;
1971 CASE_FLT_FN (BUILT_IN_CEIL):
1972 builtin_optab = ceil_optab; break;
1973 CASE_FLT_FN (BUILT_IN_TRUNC):
1974 builtin_optab = btrunc_optab; break;
1975 CASE_FLT_FN (BUILT_IN_ROUND):
1976 builtin_optab = round_optab; break;
1977 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1978 builtin_optab = nearbyint_optab;
1979 if (flag_trapping_math)
1981 /* Else fallthrough and expand as rint. */
1982 CASE_FLT_FN (BUILT_IN_RINT):
1983 builtin_optab = rint_optab; break;
1984 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1985 builtin_optab = significand_optab; break;
1990 /* Make a suitable register to place result in. */
1991 mode = TYPE_MODE (TREE_TYPE (exp));
1993 if (! flag_errno_math || ! HONOR_NANS (mode))
1996 /* Before working hard, check whether the instruction is available. */
1997 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1999 target = gen_reg_rtx (mode);
2001 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2002 need to expand the argument again. This way, we will not perform
2003 side-effects more the once. */
2004 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2006 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2010 /* Compute into TARGET.
2011 Set TARGET to wherever the result comes back. */
2012 target = expand_unop (mode, builtin_optab, op0, target, 0);
2017 expand_errno_check (exp, target);
2019 /* Output the entire sequence. */
2020 insns = get_insns ();
2026 /* If we were unable to expand via the builtin, stop the sequence
2027 (without outputting the insns) and call to the library function
2028 with the stabilized argument list. */
2032 return expand_call (exp, target, target == const0_rtx);
2035 /* Expand a call to the builtin binary math functions (pow and atan2).
2036 Return NULL_RTX if a normal call should be emitted rather than expanding the
2037 function in-line. EXP is the expression that is a call to the builtin
2038 function; if convenient, the result should be placed in TARGET.
2039 SUBTARGET may be used as the target for computing one of EXP's
2043 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2045 optab builtin_optab;
2046 rtx op0, op1, insns;
2047 int op1_type = REAL_TYPE;
2048 tree fndecl = get_callee_fndecl (exp);
2050 enum machine_mode mode;
2051 bool errno_set = true;
2053 switch (DECL_FUNCTION_CODE (fndecl))
2055 CASE_FLT_FN (BUILT_IN_SCALBN):
2056 CASE_FLT_FN (BUILT_IN_SCALBLN):
2057 CASE_FLT_FN (BUILT_IN_LDEXP):
2058 op1_type = INTEGER_TYPE;
2063 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2066 arg0 = CALL_EXPR_ARG (exp, 0);
2067 arg1 = CALL_EXPR_ARG (exp, 1);
2069 switch (DECL_FUNCTION_CODE (fndecl))
2071 CASE_FLT_FN (BUILT_IN_POW):
2072 builtin_optab = pow_optab; break;
2073 CASE_FLT_FN (BUILT_IN_ATAN2):
2074 builtin_optab = atan2_optab; break;
2075 CASE_FLT_FN (BUILT_IN_SCALB):
2076 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2078 builtin_optab = scalb_optab; break;
2079 CASE_FLT_FN (BUILT_IN_SCALBN):
2080 CASE_FLT_FN (BUILT_IN_SCALBLN):
2081 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2083 /* Fall through... */
2084 CASE_FLT_FN (BUILT_IN_LDEXP):
2085 builtin_optab = ldexp_optab; break;
2086 CASE_FLT_FN (BUILT_IN_FMOD):
2087 builtin_optab = fmod_optab; break;
2088 CASE_FLT_FN (BUILT_IN_REMAINDER):
2089 CASE_FLT_FN (BUILT_IN_DREM):
2090 builtin_optab = remainder_optab; break;
2095 /* Make a suitable register to place result in. */
2096 mode = TYPE_MODE (TREE_TYPE (exp));
2098 /* Before working hard, check whether the instruction is available. */
2099 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2102 target = gen_reg_rtx (mode);
2104 if (! flag_errno_math || ! HONOR_NANS (mode))
2107 /* Always stabilize the argument list. */
2108 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2109 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2111 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2112 op1 = expand_normal (arg1);
2116 /* Compute into TARGET.
2117 Set TARGET to wherever the result comes back. */
2118 target = expand_binop (mode, builtin_optab, op0, op1,
2119 target, 0, OPTAB_DIRECT);
2121 /* If we were unable to expand via the builtin, stop the sequence
2122 (without outputting the insns) and call to the library function
2123 with the stabilized argument list. */
2127 return expand_call (exp, target, target == const0_rtx);
2131 expand_errno_check (exp, target);
2133 /* Output the entire sequence. */
2134 insns = get_insns ();
2141 /* Expand a call to the builtin sin and cos math functions.
2142 Return NULL_RTX if a normal call should be emitted rather than expanding the
2143 function in-line. EXP is the expression that is a call to the builtin
2144 function; if convenient, the result should be placed in TARGET.
2145 SUBTARGET may be used as the target for computing one of EXP's
2149 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2151 optab builtin_optab;
2153 tree fndecl = get_callee_fndecl (exp);
2154 enum machine_mode mode;
2157 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2160 arg = CALL_EXPR_ARG (exp, 0);
2162 switch (DECL_FUNCTION_CODE (fndecl))
2164 CASE_FLT_FN (BUILT_IN_SIN):
2165 CASE_FLT_FN (BUILT_IN_COS):
2166 builtin_optab = sincos_optab; break;
2171 /* Make a suitable register to place result in. */
2172 mode = TYPE_MODE (TREE_TYPE (exp));
2174 /* Check if sincos insn is available, otherwise fallback
2175 to sin or cos insn. */
2176 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2177 switch (DECL_FUNCTION_CODE (fndecl))
2179 CASE_FLT_FN (BUILT_IN_SIN):
2180 builtin_optab = sin_optab; break;
2181 CASE_FLT_FN (BUILT_IN_COS):
2182 builtin_optab = cos_optab; break;
2187 /* Before working hard, check whether the instruction is available. */
2188 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2190 target = gen_reg_rtx (mode);
2192 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2193 need to expand the argument again. This way, we will not perform
2194 side-effects more the once. */
2195 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2197 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2201 /* Compute into TARGET.
2202 Set TARGET to wherever the result comes back. */
2203 if (builtin_optab == sincos_optab)
2207 switch (DECL_FUNCTION_CODE (fndecl))
2209 CASE_FLT_FN (BUILT_IN_SIN):
2210 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2212 CASE_FLT_FN (BUILT_IN_COS):
2213 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2218 gcc_assert (result);
2222 target = expand_unop (mode, builtin_optab, op0, target, 0);
2227 /* Output the entire sequence. */
2228 insns = get_insns ();
2234 /* If we were unable to expand via the builtin, stop the sequence
2235 (without outputting the insns) and call to the library function
2236 with the stabilized argument list. */
2240 target = expand_call (exp, target, target == const0_rtx);
2245 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2246 return an RTL instruction code that implements the functionality.
2247 If that isn't possible or available return CODE_FOR_nothing. */
2249 static enum insn_code
2250 interclass_mathfn_icode (tree arg, tree fndecl)
2252 bool errno_set = false;
2253 optab builtin_optab = 0;
2254 enum machine_mode mode;
2256 switch (DECL_FUNCTION_CODE (fndecl))
2258 CASE_FLT_FN (BUILT_IN_ILOGB):
2259 errno_set = true; builtin_optab = ilogb_optab; break;
2260 CASE_FLT_FN (BUILT_IN_ISINF):
2261 builtin_optab = isinf_optab; break;
2262 case BUILT_IN_ISNORMAL:
2263 case BUILT_IN_ISFINITE:
2264 CASE_FLT_FN (BUILT_IN_FINITE):
2265 case BUILT_IN_FINITED32:
2266 case BUILT_IN_FINITED64:
2267 case BUILT_IN_FINITED128:
2268 case BUILT_IN_ISINFD32:
2269 case BUILT_IN_ISINFD64:
2270 case BUILT_IN_ISINFD128:
2271 /* These builtins have no optabs (yet). */
2277 /* There's no easy way to detect the case we need to set EDOM. */
2278 if (flag_errno_math && errno_set)
2279 return CODE_FOR_nothing;
2281 /* Optab mode depends on the mode of the input argument. */
2282 mode = TYPE_MODE (TREE_TYPE (arg));
2285 return optab_handler (builtin_optab, mode)->insn_code;
2286 return CODE_FOR_nothing;
2289 /* Expand a call to one of the builtin math functions that operate on
2290 floating point argument and output an integer result (ilogb, isinf,
2292 Return 0 if a normal call should be emitted rather than expanding the
2293 function in-line. EXP is the expression that is a call to the builtin
2294 function; if convenient, the result should be placed in TARGET.
2295 SUBTARGET may be used as the target for computing one of EXP's operands. */
2298 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2300 enum insn_code icode = CODE_FOR_nothing;
2302 tree fndecl = get_callee_fndecl (exp);
2303 enum machine_mode mode;
2306 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2309 arg = CALL_EXPR_ARG (exp, 0);
2310 icode = interclass_mathfn_icode (arg, fndecl);
2311 mode = TYPE_MODE (TREE_TYPE (arg));
2313 if (icode != CODE_FOR_nothing)
2315 rtx last = get_last_insn ();
2316 tree orig_arg = arg;
2317 /* Make a suitable register to place result in. */
2319 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2320 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2322 gcc_assert (insn_data[icode].operand[0].predicate
2323 (target, GET_MODE (target)));
2325 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2326 need to expand the argument again. This way, we will not perform
2327 side-effects more the once. */
2328 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2330 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2332 if (mode != GET_MODE (op0))
2333 op0 = convert_to_mode (mode, op0, 0);
2335 /* Compute into TARGET.
2336 Set TARGET to wherever the result comes back. */
2337 if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
2339 delete_insns_since (last);
2340 CALL_EXPR_ARG (exp, 0) = orig_arg;
2346 /* Expand a call to the builtin sincos math function.
2347 Return NULL_RTX if a normal call should be emitted rather than expanding the
2348 function in-line. EXP is the expression that is a call to the builtin
2352 expand_builtin_sincos (tree exp)
2354 rtx op0, op1, op2, target1, target2;
2355 enum machine_mode mode;
2356 tree arg, sinp, cosp;
2358 location_t loc = EXPR_LOCATION (exp);
2360 if (!validate_arglist (exp, REAL_TYPE,
2361 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2364 arg = CALL_EXPR_ARG (exp, 0);
2365 sinp = CALL_EXPR_ARG (exp, 1);
2366 cosp = CALL_EXPR_ARG (exp, 2);
2368 /* Make a suitable register to place result in. */
2369 mode = TYPE_MODE (TREE_TYPE (arg));
2371 /* Check if sincos insn is available, otherwise emit the call. */
2372 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2375 target1 = gen_reg_rtx (mode);
2376 target2 = gen_reg_rtx (mode);
2378 op0 = expand_normal (arg);
2379 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2380 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2382 /* Compute into target1 and target2.
2383 Set TARGET to wherever the result comes back. */
2384 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2385 gcc_assert (result);
2387 /* Move target1 and target2 to the memory locations indicated
2389 emit_move_insn (op1, target1);
2390 emit_move_insn (op2, target2);
2395 /* Expand a call to the internal cexpi builtin to the sincos math function.
2396 EXP is the expression that is a call to the builtin function; if convenient,
2397 the result should be placed in TARGET. SUBTARGET may be used as the target
2398 for computing one of EXP's operands. */
2401 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2403 tree fndecl = get_callee_fndecl (exp);
2405 enum machine_mode mode;
2407 location_t loc = EXPR_LOCATION (exp);
2409 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2412 arg = CALL_EXPR_ARG (exp, 0);
2413 type = TREE_TYPE (arg);
2414 mode = TYPE_MODE (TREE_TYPE (arg));
2416 /* Try expanding via a sincos optab, fall back to emitting a libcall
2417 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2418 is only generated from sincos, cexp or if we have either of them. */
2419 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2421 op1 = gen_reg_rtx (mode);
2422 op2 = gen_reg_rtx (mode);
2424 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2426 /* Compute into op1 and op2. */
2427 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2429 else if (TARGET_HAS_SINCOS)
2431 tree call, fn = NULL_TREE;
2435 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2436 fn = built_in_decls[BUILT_IN_SINCOSF];
2437 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2438 fn = built_in_decls[BUILT_IN_SINCOS];
2439 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2440 fn = built_in_decls[BUILT_IN_SINCOSL];
2444 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2445 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2446 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2447 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2448 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2449 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2451 /* Make sure not to fold the sincos call again. */
2452 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2453 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2454 call, 3, arg, top1, top2));
2458 tree call, fn = NULL_TREE, narg;
2459 tree ctype = build_complex_type (type);
2461 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2462 fn = built_in_decls[BUILT_IN_CEXPF];
2463 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2464 fn = built_in_decls[BUILT_IN_CEXP];
2465 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2466 fn = built_in_decls[BUILT_IN_CEXPL];
2470 /* If we don't have a decl for cexp create one. This is the
2471 friendliest fallback if the user calls __builtin_cexpi
2472 without full target C99 function support. */
2473 if (fn == NULL_TREE)
2476 const char *name = NULL;
2478 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2480 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2482 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2485 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2486 fn = build_fn_decl (name, fntype);
2489 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2490 build_real (type, dconst0), arg);
2492 /* Make sure not to fold the cexp call again. */
2493 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2494 return expand_expr (build_call_nary (ctype, call, 1, narg),
2495 target, VOIDmode, EXPAND_NORMAL);
2498 /* Now build the proper return type. */
2499 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2500 make_tree (TREE_TYPE (arg), op2),
2501 make_tree (TREE_TYPE (arg), op1)),
2502 target, VOIDmode, EXPAND_NORMAL);
2505 /* Conveniently construct a function call expression. FNDECL names the
2506 function to be called, N is the number of arguments, and the "..."
2507 parameters are the argument expressions. Unlike build_call_exr
2508 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2511 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2514 tree fntype = TREE_TYPE (fndecl);
2515 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2518 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2520 SET_EXPR_LOCATION (fn, loc);
2523 #define build_call_nofold(...) \
2524 build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
2526 /* Expand a call to one of the builtin rounding functions gcc defines
2527 as an extension (lfloor and lceil). As these are gcc extensions we
2528 do not need to worry about setting errno to EDOM.
2529 If expanding via optab fails, lower expression to (int)(floor(x)).
2530 EXP is the expression that is a call to the builtin function;
2531 if convenient, the result should be placed in TARGET. */
2534 expand_builtin_int_roundingfn (tree exp, rtx target)
2536 convert_optab builtin_optab;
2537 rtx op0, insns, tmp;
2538 tree fndecl = get_callee_fndecl (exp);
2539 enum built_in_function fallback_fn;
2540 tree fallback_fndecl;
2541 enum machine_mode mode;
2544 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2547 arg = CALL_EXPR_ARG (exp, 0);
2549 switch (DECL_FUNCTION_CODE (fndecl))
2551 CASE_FLT_FN (BUILT_IN_LCEIL):
2552 CASE_FLT_FN (BUILT_IN_LLCEIL):
2553 builtin_optab = lceil_optab;
2554 fallback_fn = BUILT_IN_CEIL;
2557 CASE_FLT_FN (BUILT_IN_LFLOOR):
2558 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2559 builtin_optab = lfloor_optab;
2560 fallback_fn = BUILT_IN_FLOOR;
2567 /* Make a suitable register to place result in. */
2568 mode = TYPE_MODE (TREE_TYPE (exp));
2570 target = gen_reg_rtx (mode);
2572 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2573 need to expand the argument again. This way, we will not perform
2574 side-effects more the once. */
2575 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2577 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2581 /* Compute into TARGET. */
2582 if (expand_sfix_optab (target, op0, builtin_optab))
2584 /* Output the entire sequence. */
2585 insns = get_insns ();
2591 /* If we were unable to expand via the builtin, stop the sequence
2592 (without outputting the insns). */
2595 /* Fall back to floating point rounding optab. */
2596 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2598 /* For non-C99 targets we may end up without a fallback fndecl here
2599 if the user called __builtin_lfloor directly. In this case emit
2600 a call to the floor/ceil variants nevertheless. This should result
2601 in the best user experience for not full C99 targets. */
2602 if (fallback_fndecl == NULL_TREE)
2605 const char *name = NULL;
2607 switch (DECL_FUNCTION_CODE (fndecl))
2609 case BUILT_IN_LCEIL:
2610 case BUILT_IN_LLCEIL:
2613 case BUILT_IN_LCEILF:
2614 case BUILT_IN_LLCEILF:
2617 case BUILT_IN_LCEILL:
2618 case BUILT_IN_LLCEILL:
2621 case BUILT_IN_LFLOOR:
2622 case BUILT_IN_LLFLOOR:
2625 case BUILT_IN_LFLOORF:
2626 case BUILT_IN_LLFLOORF:
2629 case BUILT_IN_LFLOORL:
2630 case BUILT_IN_LLFLOORL:
2637 fntype = build_function_type_list (TREE_TYPE (arg),
2638 TREE_TYPE (arg), NULL_TREE);
2639 fallback_fndecl = build_fn_decl (name, fntype);
2642 exp = build_call_nofold (fallback_fndecl, 1, arg);
2644 tmp = expand_normal (exp);
2646 /* Truncate the result of floating point optab to integer
2647 via expand_fix (). */
2648 target = gen_reg_rtx (mode);
2649 expand_fix (target, tmp, 0);
2654 /* Expand a call to one of the builtin math functions doing integer
2656 Return 0 if a normal call should be emitted rather than expanding the
2657 function in-line. EXP is the expression that is a call to the builtin
2658 function; if convenient, the result should be placed in TARGET. */
2661 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2663 convert_optab builtin_optab;
2665 tree fndecl = get_callee_fndecl (exp);
2667 enum machine_mode mode;
2669 /* There's no easy way to detect the case we need to set EDOM. */
2670 if (flag_errno_math)
2673 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2676 arg = CALL_EXPR_ARG (exp, 0);
2678 switch (DECL_FUNCTION_CODE (fndecl))
2680 CASE_FLT_FN (BUILT_IN_LRINT):
2681 CASE_FLT_FN (BUILT_IN_LLRINT):
2682 builtin_optab = lrint_optab; break;
2683 CASE_FLT_FN (BUILT_IN_LROUND):
2684 CASE_FLT_FN (BUILT_IN_LLROUND):
2685 builtin_optab = lround_optab; break;
2690 /* Make a suitable register to place result in. */
2691 mode = TYPE_MODE (TREE_TYPE (exp));
2693 target = gen_reg_rtx (mode);
2695 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2696 need to expand the argument again. This way, we will not perform
2697 side-effects more the once. */
2698 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2700 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2704 if (expand_sfix_optab (target, op0, builtin_optab))
2706 /* Output the entire sequence. */
2707 insns = get_insns ();
2713 /* If we were unable to expand via the builtin, stop the sequence
2714 (without outputting the insns) and call to the library function
2715 with the stabilized argument list. */
2718 target = expand_call (exp, target, target == const0_rtx);
2723 /* To evaluate powi(x,n), the floating point value x raised to the
2724 constant integer exponent n, we use a hybrid algorithm that
2725 combines the "window method" with look-up tables. For an
2726 introduction to exponentiation algorithms and "addition chains",
2727 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2728 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2729 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2730 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2732 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2733 multiplications to inline before calling the system library's pow
2734 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2735 so this default never requires calling pow, powf or powl. */
2737 #ifndef POWI_MAX_MULTS
2738 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2741 /* The size of the "optimal power tree" lookup table. All
2742 exponents less than this value are simply looked up in the
2743 powi_table below. This threshold is also used to size the
2744 cache of pseudo registers that hold intermediate results. */
2745 #define POWI_TABLE_SIZE 256
2747 /* The size, in bits of the window, used in the "window method"
2748 exponentiation algorithm. This is equivalent to a radix of
2749 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2750 #define POWI_WINDOW_SIZE 3
2752 /* The following table is an efficient representation of an
2753 "optimal power tree". For each value, i, the corresponding
2754 value, j, in the table states than an optimal evaluation
2755 sequence for calculating pow(x,i) can be found by evaluating
2756 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2757 100 integers is given in Knuth's "Seminumerical algorithms". */
2759 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2761 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2762 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2763 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2764 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2765 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2766 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2767 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2768 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2769 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2770 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2771 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2772 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2773 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2774 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2775 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2776 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2777 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2778 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2779 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2780 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2781 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2782 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2783 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2784 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2785 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2786 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2787 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2788 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2789 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2790 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2791 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2792 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2796 /* Return the number of multiplications required to calculate
2797 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2798 subroutine of powi_cost. CACHE is an array indicating
2799 which exponents have already been calculated. */
2802 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2804 /* If we've already calculated this exponent, then this evaluation
2805 doesn't require any additional multiplications. */
2810 return powi_lookup_cost (n - powi_table[n], cache)
2811 + powi_lookup_cost (powi_table[n], cache) + 1;
2814 /* Return the number of multiplications required to calculate
2815 powi(x,n) for an arbitrary x, given the exponent N. This
2816 function needs to be kept in sync with expand_powi below. */
2819 powi_cost (HOST_WIDE_INT n)
2821 bool cache[POWI_TABLE_SIZE];
2822 unsigned HOST_WIDE_INT digit;
2823 unsigned HOST_WIDE_INT val;
2829 /* Ignore the reciprocal when calculating the cost. */
2830 val = (n < 0) ? -n : n;
2832 /* Initialize the exponent cache. */
2833 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2838 while (val >= POWI_TABLE_SIZE)
2842 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2843 result += powi_lookup_cost (digit, cache)
2844 + POWI_WINDOW_SIZE + 1;
2845 val >>= POWI_WINDOW_SIZE;
2854 return result + powi_lookup_cost (val, cache);
2857 /* Recursive subroutine of expand_powi. This function takes the array,
2858 CACHE, of already calculated exponents and an exponent N and returns
2859 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2862 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2864 unsigned HOST_WIDE_INT digit;
2868 if (n < POWI_TABLE_SIZE)
2873 target = gen_reg_rtx (mode);
2876 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2877 op1 = expand_powi_1 (mode, powi_table[n], cache);
2881 target = gen_reg_rtx (mode);
2882 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2883 op0 = expand_powi_1 (mode, n - digit, cache);
2884 op1 = expand_powi_1 (mode, digit, cache);
2888 target = gen_reg_rtx (mode);
2889 op0 = expand_powi_1 (mode, n >> 1, cache);
2893 result = expand_mult (mode, op0, op1, target, 0);
2894 if (result != target)
2895 emit_move_insn (target, result);
2899 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2900 floating point operand in mode MODE, and N is the exponent. This
2901 function needs to be kept in sync with powi_cost above. */
2904 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2906 rtx cache[POWI_TABLE_SIZE];
2910 return CONST1_RTX (mode);
2912 memset (cache, 0, sizeof (cache));
2915 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2917 /* If the original exponent was negative, reciprocate the result. */
2919 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2920 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2925 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2926 a normal call should be emitted rather than expanding the function
2927 in-line. EXP is the expression that is a call to the builtin
2928 function; if convenient, the result should be placed in TARGET. */
2931 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2935 tree type = TREE_TYPE (exp);
2936 REAL_VALUE_TYPE cint, c, c2;
2939 enum machine_mode mode = TYPE_MODE (type);
2941 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2944 arg0 = CALL_EXPR_ARG (exp, 0);
2945 arg1 = CALL_EXPR_ARG (exp, 1);
2947 if (TREE_CODE (arg1) != REAL_CST
2948 || TREE_OVERFLOW (arg1))
2949 return expand_builtin_mathfn_2 (exp, target, subtarget);
2951 /* Handle constant exponents. */
2953 /* For integer valued exponents we can expand to an optimal multiplication
2954 sequence using expand_powi. */
2955 c = TREE_REAL_CST (arg1);
2956 n = real_to_integer (&c);
2957 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2958 if (real_identical (&c, &cint)
2959 && ((n >= -1 && n <= 2)
2960 || (flag_unsafe_math_optimizations
2961 && optimize_insn_for_speed_p ()
2962 && powi_cost (n) <= POWI_MAX_MULTS)))
2964 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2967 op = force_reg (mode, op);
2968 op = expand_powi (op, mode, n);
2973 narg0 = builtin_save_expr (arg0);
2975 /* If the exponent is not integer valued, check if it is half of an integer.
2976 In this case we can expand to sqrt (x) * x**(n/2). */
2977 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2978 if (fn != NULL_TREE)
2980 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2981 n = real_to_integer (&c2);
2982 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2983 if (real_identical (&c2, &cint)
2984 && ((flag_unsafe_math_optimizations
2985 && optimize_insn_for_speed_p ()
2986 && powi_cost (n/2) <= POWI_MAX_MULTS)
2987 /* Even the c==0.5 case cannot be done unconditionally
2988 when we need to preserve signed zeros, as
2989 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
2990 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)))
2992 tree call_expr = build_call_nofold (fn, 1, narg0);
2993 /* Use expand_expr in case the newly built call expression
2994 was folded to a non-call. */
2995 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2998 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2999 op2 = force_reg (mode, op2);
3000 op2 = expand_powi (op2, mode, abs (n / 2));
3001 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3002 0, OPTAB_LIB_WIDEN);
3003 /* If the original exponent was negative, reciprocate the
3006 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3007 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3013 /* Try if the exponent is a third of an integer. In this case
3014 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3015 different from pow (x, 1./3.) due to rounding and behavior
3016 with negative x we need to constrain this transformation to
3017 unsafe math and positive x or finite math. */
3018 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3020 && flag_unsafe_math_optimizations
3021 && (tree_expr_nonnegative_p (arg0)
3022 || !HONOR_NANS (mode)))
3024 REAL_VALUE_TYPE dconst3;
3025 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3026 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3027 real_round (&c2, mode, &c2);
3028 n = real_to_integer (&c2);
3029 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3030 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3031 real_convert (&c2, mode, &c2);
3032 if (real_identical (&c2, &c)
3033 && ((optimize_insn_for_speed_p ()
3034 && powi_cost (n/3) <= POWI_MAX_MULTS)
3037 tree call_expr = build_call_nofold (fn, 1,narg0);
3038 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3039 if (abs (n) % 3 == 2)
3040 op = expand_simple_binop (mode, MULT, op, op, op,
3041 0, OPTAB_LIB_WIDEN);
3044 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3045 op2 = force_reg (mode, op2);
3046 op2 = expand_powi (op2, mode, abs (n / 3));
3047 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3048 0, OPTAB_LIB_WIDEN);
3049 /* If the original exponent was negative, reciprocate the
3052 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3053 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3059 /* Fall back to optab expansion. */
3060 return expand_builtin_mathfn_2 (exp, target, subtarget);
3063 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3064 a normal call should be emitted rather than expanding the function
3065 in-line. EXP is the expression that is a call to the builtin
3066 function; if convenient, the result should be placed in TARGET. */
3069 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3073 enum machine_mode mode;
3074 enum machine_mode mode2;
3076 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3079 arg0 = CALL_EXPR_ARG (exp, 0);
3080 arg1 = CALL_EXPR_ARG (exp, 1);
3081 mode = TYPE_MODE (TREE_TYPE (exp));
3083 /* Handle constant power. */
3085 if (TREE_CODE (arg1) == INTEGER_CST
3086 && !TREE_OVERFLOW (arg1))
3088 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3090 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3091 Otherwise, check the number of multiplications required. */
3092 if ((TREE_INT_CST_HIGH (arg1) == 0
3093 || TREE_INT_CST_HIGH (arg1) == -1)
3094 && ((n >= -1 && n <= 2)
3095 || (optimize_insn_for_speed_p ()
3096 && powi_cost (n) <= POWI_MAX_MULTS)))
3098 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3099 op0 = force_reg (mode, op0);
3100 return expand_powi (op0, mode, n);
3104 /* Emit a libcall to libgcc. */
3106 /* Mode of the 2nd argument must match that of an int. */
3107 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3109 if (target == NULL_RTX)
3110 target = gen_reg_rtx (mode);
3112 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3113 if (GET_MODE (op0) != mode)
3114 op0 = convert_to_mode (mode, op0, 0);
3115 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3116 if (GET_MODE (op1) != mode2)
3117 op1 = convert_to_mode (mode2, op1, 0);
3119 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3120 target, LCT_CONST, mode, 2,
3121 op0, mode, op1, mode2);
3126 /* Expand expression EXP which is a call to the strlen builtin. Return
3127 NULL_RTX if we failed the caller should emit a normal call, otherwise
3128 try to get the result in TARGET, if convenient. */
3131 expand_builtin_strlen (tree exp, rtx target,
3132 enum machine_mode target_mode)
3134 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3140 tree src = CALL_EXPR_ARG (exp, 0);
3141 rtx result, src_reg, char_rtx, before_strlen;
3142 enum machine_mode insn_mode = target_mode, char_mode;
3143 enum insn_code icode = CODE_FOR_nothing;
3146 /* If the length can be computed at compile-time, return it. */
3147 len = c_strlen (src, 0);
3149 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3151 /* If the length can be computed at compile-time and is constant
3152 integer, but there are side-effects in src, evaluate
3153 src for side-effects, then return len.
3154 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3155 can be optimized into: i++; x = 3; */
3156 len = c_strlen (src, 1);
3157 if (len && TREE_CODE (len) == INTEGER_CST)
3159 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3160 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3163 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3165 /* If SRC is not a pointer type, don't do this operation inline. */
3169 /* Bail out if we can't compute strlen in the right mode. */
3170 while (insn_mode != VOIDmode)
3172 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3173 if (icode != CODE_FOR_nothing)
3176 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3178 if (insn_mode == VOIDmode)
3181 /* Make a place to write the result of the instruction. */
3185 && GET_MODE (result) == insn_mode
3186 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3187 result = gen_reg_rtx (insn_mode);
3189 /* Make a place to hold the source address. We will not expand
3190 the actual source until we are sure that the expansion will
3191 not fail -- there are trees that cannot be expanded twice. */
3192 src_reg = gen_reg_rtx (Pmode);
3194 /* Mark the beginning of the strlen sequence so we can emit the
3195 source operand later. */
3196 before_strlen = get_last_insn ();
3198 char_rtx = const0_rtx;
3199 char_mode = insn_data[(int) icode].operand[2].mode;
3200 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3202 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3204 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3205 char_rtx, GEN_INT (align));
3210 /* Now that we are assured of success, expand the source. */
3212 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3214 emit_move_insn (src_reg, pat);
3219 emit_insn_after (pat, before_strlen);
3221 emit_insn_before (pat, get_insns ());
3223 /* Return the value in the proper mode for this function. */
3224 if (GET_MODE (result) == target_mode)
3226 else if (target != 0)
3227 convert_move (target, result, 0);
3229 target = convert_to_mode (target_mode, result, 0);
3235 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3236 bytes from constant string DATA + OFFSET and return it as target
3240 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3241 enum machine_mode mode)
3243 const char *str = (const char *) data;
3245 gcc_assert (offset >= 0
3246 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3247 <= strlen (str) + 1));
3249 return c_readstr (str + offset, mode);
3252 /* Expand a call EXP to the memcpy builtin.
3253 Return NULL_RTX if we failed, the caller should emit a normal call,
3254 otherwise try to get the result in TARGET, if convenient (and in
3255 mode MODE if that's convenient). */
3258 expand_builtin_memcpy (tree exp, rtx target)
3260 if (!validate_arglist (exp,
3261 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3265 tree dest = CALL_EXPR_ARG (exp, 0);
3266 tree src = CALL_EXPR_ARG (exp, 1);
3267 tree len = CALL_EXPR_ARG (exp, 2);
3268 const char *src_str;
3269 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3270 unsigned int dest_align
3271 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3272 rtx dest_mem, src_mem, dest_addr, len_rtx;
3273 HOST_WIDE_INT expected_size = -1;
3274 unsigned int expected_align = 0;
3276 /* If DEST is not a pointer type, call the normal function. */
3277 if (dest_align == 0)
3280 /* If either SRC is not a pointer type, don't do this
3281 operation in-line. */
3285 if (currently_expanding_gimple_stmt)
3286 stringop_block_profile (currently_expanding_gimple_stmt,
3287 &expected_align, &expected_size);
3289 if (expected_align < dest_align)
3290 expected_align = dest_align;
3291 dest_mem = get_memory_rtx (dest, len);
3292 set_mem_align (dest_mem, dest_align);
3293 len_rtx = expand_normal (len);
3294 src_str = c_getstr (src);
3296 /* If SRC is a string constant and block move would be done
3297 by pieces, we can avoid loading the string from memory
3298 and only stored the computed constants. */
3300 && CONST_INT_P (len_rtx)
3301 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3302 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3303 CONST_CAST (char *, src_str),
3306 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3307 builtin_memcpy_read_str,
3308 CONST_CAST (char *, src_str),
3309 dest_align, false, 0);
3310 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3311 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3315 src_mem = get_memory_rtx (src, len);
3316 set_mem_align (src_mem, src_align);
3318 /* Copy word part most expediently. */
3319 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3320 CALL_EXPR_TAILCALL (exp)
3321 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3322 expected_align, expected_size);
3326 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3327 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3333 /* Expand a call EXP to the mempcpy builtin.
3334 Return NULL_RTX if we failed; the caller should emit a normal call,
3335 otherwise try to get the result in TARGET, if convenient (and in
3336 mode MODE if that's convenient). If ENDP is 0 return the
3337 destination pointer, if ENDP is 1 return the end pointer ala
3338 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3342 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3344 if (!validate_arglist (exp,
3345 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3349 tree dest = CALL_EXPR_ARG (exp, 0);
3350 tree src = CALL_EXPR_ARG (exp, 1);
3351 tree len = CALL_EXPR_ARG (exp, 2);
3352 return expand_builtin_mempcpy_args (dest, src, len,
3353 target, mode, /*endp=*/ 1);
3357 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3358 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3359 so that this can also be called without constructing an actual CALL_EXPR.
3360 The other arguments and return value are the same as for
3361 expand_builtin_mempcpy. */
3364 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3365 rtx target, enum machine_mode mode, int endp)
3367 /* If return value is ignored, transform mempcpy into memcpy. */
3368 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3370 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3371 tree result = build_call_nofold (fn, 3, dest, src, len);
3372 return expand_expr (result, target, mode, EXPAND_NORMAL);
3376 const char *src_str;
3377 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3378 unsigned int dest_align
3379 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3380 rtx dest_mem, src_mem, len_rtx;
3382 /* If either SRC or DEST is not a pointer type, don't do this
3383 operation in-line. */
3384 if (dest_align == 0 || src_align == 0)
3387 /* If LEN is not constant, call the normal function. */
3388 if (! host_integerp (len, 1))
3391 len_rtx = expand_normal (len);
3392 src_str = c_getstr (src);
3394 /* If SRC is a string constant and block move would be done
3395 by pieces, we can avoid loading the string from memory
3396 and only stored the computed constants. */
3398 && CONST_INT_P (len_rtx)
3399 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3400 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3401 CONST_CAST (char *, src_str),
3404 dest_mem = get_memory_rtx (dest, len);
3405 set_mem_align (dest_mem, dest_align);
3406 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3407 builtin_memcpy_read_str,
3408 CONST_CAST (char *, src_str),
3409 dest_align, false, endp);
3410 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3411 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3415 if (CONST_INT_P (len_rtx)
3416 && can_move_by_pieces (INTVAL (len_rtx),
3417 MIN (dest_align, src_align)))
3419 dest_mem = get_memory_rtx (dest, len);
3420 set_mem_align (dest_mem, dest_align);
3421 src_mem = get_memory_rtx (src, len);
3422 set_mem_align (src_mem, src_align);
3423 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3424 MIN (dest_align, src_align), endp);
3425 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3426 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3435 # define HAVE_movstr 0
3436 # define CODE_FOR_movstr CODE_FOR_nothing
3439 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3440 we failed, the caller should emit a normal call, otherwise try to
3441 get the result in TARGET, if convenient. If ENDP is 0 return the
3442 destination pointer, if ENDP is 1 return the end pointer ala
3443 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3447 expand_movstr (tree dest, tree src, rtx target, int endp)
3453 const struct insn_data * data;
3458 dest_mem = get_memory_rtx (dest, NULL);
3459 src_mem = get_memory_rtx (src, NULL);
3462 target = force_reg (Pmode, XEXP (dest_mem, 0));
3463 dest_mem = replace_equiv_address (dest_mem, target);
3464 end = gen_reg_rtx (Pmode);
3468 if (target == 0 || target == const0_rtx)
3470 end = gen_reg_rtx (Pmode);
3478 data = insn_data + CODE_FOR_movstr;
3480 if (data->operand[0].mode != VOIDmode)
3481 end = gen_lowpart (data->operand[0].mode, end);
3483 insn = data->genfun (end, dest_mem, src_mem);
3489 /* movstr is supposed to set end to the address of the NUL
3490 terminator. If the caller requested a mempcpy-like return value,
3492 if (endp == 1 && target != const0_rtx)
3494 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3495 emit_move_insn (target, force_operand (tem, NULL_RTX));
3501 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3502 NULL_RTX if we failed the caller should emit a normal call, otherwise
3503 try to get the result in TARGET, if convenient (and in mode MODE if that's
3507 expand_builtin_strcpy (tree exp, rtx target)
3509 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3511 tree dest = CALL_EXPR_ARG (exp, 0);
3512 tree src = CALL_EXPR_ARG (exp, 1);
3513 return expand_builtin_strcpy_args (dest, src, target);
3518 /* Helper function to do the actual work for expand_builtin_strcpy. The
3519 arguments to the builtin_strcpy call DEST and SRC are broken out
3520 so that this can also be called without constructing an actual CALL_EXPR.
3521 The other arguments and return value are the same as for
3522 expand_builtin_strcpy. */
3525 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3527 return expand_movstr (dest, src, target, /*endp=*/0);
3530 /* Expand a call EXP to the stpcpy builtin.
3531 Return NULL_RTX if we failed the caller should emit a normal call,
3532 otherwise try to get the result in TARGET, if convenient (and in
3533 mode MODE if that's convenient). */
3536 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3539 location_t loc = EXPR_LOCATION (exp);
3541 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3544 dst = CALL_EXPR_ARG (exp, 0);
3545 src = CALL_EXPR_ARG (exp, 1);
3547 /* If return value is ignored, transform stpcpy into strcpy. */
3548 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3550 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3551 tree result = build_call_nofold (fn, 2, dst, src);
3552 return expand_expr (result, target, mode, EXPAND_NORMAL);
3559 /* Ensure we get an actual string whose length can be evaluated at
3560 compile-time, not an expression containing a string. This is
3561 because the latter will potentially produce pessimized code
3562 when used to produce the return value. */
3563 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3564 return expand_movstr (dst, src, target, /*endp=*/2);
3566 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3567 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3568 target, mode, /*endp=*/2);
3573 if (TREE_CODE (len) == INTEGER_CST)
3575 rtx len_rtx = expand_normal (len);
3577 if (CONST_INT_P (len_rtx))
3579 ret = expand_builtin_strcpy_args (dst, src, target);
3585 if (mode != VOIDmode)
3586 target = gen_reg_rtx (mode);
3588 target = gen_reg_rtx (GET_MODE (ret));
3590 if (GET_MODE (target) != GET_MODE (ret))
3591 ret = gen_lowpart (GET_MODE (target), ret);
3593 ret = plus_constant (ret, INTVAL (len_rtx));
3594 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3602 return expand_movstr (dst, src, target, /*endp=*/2);
3606 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3607 bytes from constant string DATA + OFFSET and return it as target
3611 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3612 enum machine_mode mode)
3614 const char *str = (const char *) data;
3616 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3619 return c_readstr (str + offset, mode);
3622 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3623 NULL_RTX if we failed the caller should emit a normal call. */
3626 expand_builtin_strncpy (tree exp, rtx target)
3628 location_t loc = EXPR_LOCATION (exp);
3630 if (validate_arglist (exp,
3631 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3633 tree dest = CALL_EXPR_ARG (exp, 0);
3634 tree src = CALL_EXPR_ARG (exp, 1);
3635 tree len = CALL_EXPR_ARG (exp, 2);
3636 tree slen = c_strlen (src, 1);
3638 /* We must be passed a constant len and src parameter. */
3639 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3642 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3644 /* We're required to pad with trailing zeros if the requested
3645 len is greater than strlen(s2)+1. In that case try to
3646 use store_by_pieces, if it fails, punt. */
3647 if (tree_int_cst_lt (slen, len))
3649 unsigned int dest_align
3650 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3651 const char *p = c_getstr (src);
3654 if (!p || dest_align == 0 || !host_integerp (len, 1)
3655 || !can_store_by_pieces (tree_low_cst (len, 1),
3656 builtin_strncpy_read_str,
3657 CONST_CAST (char *, p),
3661 dest_mem = get_memory_rtx (dest, len);
3662 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3663 builtin_strncpy_read_str,
3664 CONST_CAST (char *, p), dest_align, false, 0);
3665 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3666 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3673 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3674 bytes from constant string DATA + OFFSET and return it as target
3678 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3679 enum machine_mode mode)
3681 const char *c = (const char *) data;
3682 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3684 memset (p, *c, GET_MODE_SIZE (mode));
3686 return c_readstr (p, mode);
3689 /* Callback routine for store_by_pieces. Return the RTL of a register
3690 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3691 char value given in the RTL register data. For example, if mode is
3692 4 bytes wide, return the RTL for 0x01010101*data. */
3695 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3696 enum machine_mode mode)
3702 size = GET_MODE_SIZE (mode);
3706 p = XALLOCAVEC (char, size);
3707 memset (p, 1, size);
3708 coeff = c_readstr (p, mode);
3710 target = convert_to_mode (mode, (rtx) data, 1);
3711 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3712 return force_reg (mode, target);
3715 /* Expand expression EXP, which is a call to the memset builtin. Return
3716 NULL_RTX if we failed the caller should emit a normal call, otherwise
3717 try to get the result in TARGET, if convenient (and in mode MODE if that's
3721 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3723 if (!validate_arglist (exp,
3724 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3728 tree dest = CALL_EXPR_ARG (exp, 0);
3729 tree val = CALL_EXPR_ARG (exp, 1);
3730 tree len = CALL_EXPR_ARG (exp, 2);
3731 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3735 /* Helper function to do the actual work for expand_builtin_memset. The
3736 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3737 so that this can also be called without constructing an actual CALL_EXPR.
3738 The other arguments and return value are the same as for
3739 expand_builtin_memset. */
3742 expand_builtin_memset_args (tree dest, tree val, tree len,
3743 rtx target, enum machine_mode mode, tree orig_exp)
3746 enum built_in_function fcode;
3748 unsigned int dest_align;
3749 rtx dest_mem, dest_addr, len_rtx;
3750 HOST_WIDE_INT expected_size = -1;
3751 unsigned int expected_align = 0;
3753 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3755 /* If DEST is not a pointer type, don't do this operation in-line. */
3756 if (dest_align == 0)
3759 if (currently_expanding_gimple_stmt)
3760 stringop_block_profile (currently_expanding_gimple_stmt,
3761 &expected_align, &expected_size);
3763 if (expected_align < dest_align)
3764 expected_align = dest_align;
3766 /* If the LEN parameter is zero, return DEST. */
3767 if (integer_zerop (len))
3769 /* Evaluate and ignore VAL in case it has side-effects. */
3770 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3771 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3774 /* Stabilize the arguments in case we fail. */
3775 dest = builtin_save_expr (dest);
3776 val = builtin_save_expr (val);
3777 len = builtin_save_expr (len);
3779 len_rtx = expand_normal (len);
3780 dest_mem = get_memory_rtx (dest, len);
3782 if (TREE_CODE (val) != INTEGER_CST)
3786 val_rtx = expand_normal (val);
3787 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3790 /* Assume that we can memset by pieces if we can store
3791 * the coefficients by pieces (in the required modes).
3792 * We can't pass builtin_memset_gen_str as that emits RTL. */
3794 if (host_integerp (len, 1)
3795 && can_store_by_pieces (tree_low_cst (len, 1),
3796 builtin_memset_read_str, &c, dest_align,
3799 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3801 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3802 builtin_memset_gen_str, val_rtx, dest_align,
3805 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3806 dest_align, expected_align,
3810 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3811 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3815 if (target_char_cast (val, &c))
3820 if (host_integerp (len, 1)
3821 && can_store_by_pieces (tree_low_cst (len, 1),
3822 builtin_memset_read_str, &c, dest_align,
3824 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3825 builtin_memset_read_str, &c, dest_align, true, 0);
3826 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3827 dest_align, expected_align,
3831 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3832 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3836 set_mem_align (dest_mem, dest_align);
3837 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3838 CALL_EXPR_TAILCALL (orig_exp)
3839 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3840 expected_align, expected_size);
3844 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3845 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3851 fndecl = get_callee_fndecl (orig_exp);
3852 fcode = DECL_FUNCTION_CODE (fndecl);
3853 if (fcode == BUILT_IN_MEMSET)
3854 fn = build_call_nofold (fndecl, 3, dest, val, len);
3855 else if (fcode == BUILT_IN_BZERO)
3856 fn = build_call_nofold (fndecl, 2, dest, len);
3859 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3860 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3861 return expand_call (fn, target, target == const0_rtx);
3864 /* Expand expression EXP, which is a call to the bzero builtin. Return
3865 NULL_RTX if we failed the caller should emit a normal call. */
3868 expand_builtin_bzero (tree exp)
3871 location_t loc = EXPR_LOCATION (exp);
3873 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3876 dest = CALL_EXPR_ARG (exp, 0);
3877 size = CALL_EXPR_ARG (exp, 1);
3879 /* New argument list transforming bzero(ptr x, int y) to
3880 memset(ptr x, int 0, size_t y). This is done this way
3881 so that if it isn't expanded inline, we fallback to
3882 calling bzero instead of memset. */
3884 return expand_builtin_memset_args (dest, integer_zero_node,
3885 fold_convert_loc (loc, sizetype, size),
3886 const0_rtx, VOIDmode, exp);
3889 /* Expand expression EXP, which is a call to the memcmp built-in function.
3890 Return NULL_RTX if we failed and the
3891 caller should emit a normal call, otherwise try to get the result in
3892 TARGET, if convenient (and in mode MODE, if that's convenient). */
3895 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3896 ATTRIBUTE_UNUSED enum machine_mode mode)
3898 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3900 if (!validate_arglist (exp,
3901 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3904 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3906 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3909 tree arg1 = CALL_EXPR_ARG (exp, 0);
3910 tree arg2 = CALL_EXPR_ARG (exp, 1);
3911 tree len = CALL_EXPR_ARG (exp, 2);
3914 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3916 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3917 enum machine_mode insn_mode;
3919 #ifdef HAVE_cmpmemsi
3921 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3924 #ifdef HAVE_cmpstrnsi
3926 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3931 /* If we don't have POINTER_TYPE, call the function. */
3932 if (arg1_align == 0 || arg2_align == 0)
3935 /* Make a place to write the result of the instruction. */
3938 && REG_P (result) && GET_MODE (result) == insn_mode
3939 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3940 result = gen_reg_rtx (insn_mode);
3942 arg1_rtx = get_memory_rtx (arg1, len);
3943 arg2_rtx = get_memory_rtx (arg2, len);
3944 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3946 /* Set MEM_SIZE as appropriate. */
3947 if (CONST_INT_P (arg3_rtx))
3949 set_mem_size (arg1_rtx, arg3_rtx);
3950 set_mem_size (arg2_rtx, arg3_rtx);
3953 #ifdef HAVE_cmpmemsi
3955 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3956 GEN_INT (MIN (arg1_align, arg2_align)));
3959 #ifdef HAVE_cmpstrnsi
3961 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3962 GEN_INT (MIN (arg1_align, arg2_align)));
3970 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3971 TYPE_MODE (integer_type_node), 3,
3972 XEXP (arg1_rtx, 0), Pmode,
3973 XEXP (arg2_rtx, 0), Pmode,
3974 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3975 TYPE_UNSIGNED (sizetype)),
3976 TYPE_MODE (sizetype));
3978 /* Return the value in the proper mode for this function. */
3979 mode = TYPE_MODE (TREE_TYPE (exp));
3980 if (GET_MODE (result) == mode)
3982 else if (target != 0)
3984 convert_move (target, result, 0);
3988 return convert_to_mode (mode, result, 0);
3995 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3996 if we failed the caller should emit a normal call, otherwise try to get
3997 the result in TARGET, if convenient. */
4000 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4002 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4005 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4006 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4007 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4009 rtx arg1_rtx, arg2_rtx;
4010 rtx result, insn = NULL_RTX;
4012 tree arg1 = CALL_EXPR_ARG (exp, 0);
4013 tree arg2 = CALL_EXPR_ARG (exp, 1);
4016 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4018 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4020 /* If we don't have POINTER_TYPE, call the function. */
4021 if (arg1_align == 0 || arg2_align == 0)
4024 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4025 arg1 = builtin_save_expr (arg1);
4026 arg2 = builtin_save_expr (arg2);
4028 arg1_rtx = get_memory_rtx (arg1, NULL);
4029 arg2_rtx = get_memory_rtx (arg2, NULL);
4031 #ifdef HAVE_cmpstrsi
4032 /* Try to call cmpstrsi. */
4035 enum machine_mode insn_mode
4036 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4038 /* Make a place to write the result of the instruction. */
4041 && REG_P (result) && GET_MODE (result) == insn_mode
4042 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4043 result = gen_reg_rtx (insn_mode);
4045 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4046 GEN_INT (MIN (arg1_align, arg2_align)));
4049 #ifdef HAVE_cmpstrnsi
4050 /* Try to determine at least one length and call cmpstrnsi. */
4051 if (!insn && HAVE_cmpstrnsi)
4056 enum machine_mode insn_mode
4057 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4058 tree len1 = c_strlen (arg1, 1);
4059 tree len2 = c_strlen (arg2, 1);
4062 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4064 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4066 /* If we don't have a constant length for the first, use the length
4067 of the second, if we know it. We don't require a constant for
4068 this case; some cost analysis could be done if both are available
4069 but neither is constant. For now, assume they're equally cheap,
4070 unless one has side effects. If both strings have constant lengths,
4077 else if (TREE_SIDE_EFFECTS (len1))
4079 else if (TREE_SIDE_EFFECTS (len2))
4081 else if (TREE_CODE (len1) != INTEGER_CST)
4083 else if (TREE_CODE (len2) != INTEGER_CST)
4085 else if (tree_int_cst_lt (len1, len2))
4090 /* If both arguments have side effects, we cannot optimize. */
4091 if (!len || TREE_SIDE_EFFECTS (len))
4094 arg3_rtx = expand_normal (len);
4096 /* Make a place to write the result of the instruction. */
4099 && REG_P (result) && GET_MODE (result) == insn_mode
4100 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4101 result = gen_reg_rtx (insn_mode);
4103 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4104 GEN_INT (MIN (arg1_align, arg2_align)));
4110 enum machine_mode mode;
4113 /* Return the value in the proper mode for this function. */
4114 mode = TYPE_MODE (TREE_TYPE (exp));
4115 if (GET_MODE (result) == mode)
4118 return convert_to_mode (mode, result, 0);
4119 convert_move (target, result, 0);
4123 /* Expand the library call ourselves using a stabilized argument
4124 list to avoid re-evaluating the function's arguments twice. */
4125 #ifdef HAVE_cmpstrnsi
4128 fndecl = get_callee_fndecl (exp);
4129 fn = build_call_nofold (fndecl, 2, arg1, arg2);
4130 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4131 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4132 return expand_call (fn, target, target == const0_rtx);
4138 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4139 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4140 the result in TARGET, if convenient. */
4143 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4144 ATTRIBUTE_UNUSED enum machine_mode mode)
4146 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4148 if (!validate_arglist (exp,
4149 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4152 /* If c_strlen can determine an expression for one of the string
4153 lengths, and it doesn't have side effects, then emit cmpstrnsi
4154 using length MIN(strlen(string)+1, arg3). */
4155 #ifdef HAVE_cmpstrnsi
4158 tree len, len1, len2;
4159 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4162 tree arg1 = CALL_EXPR_ARG (exp, 0);
4163 tree arg2 = CALL_EXPR_ARG (exp, 1);
4164 tree arg3 = CALL_EXPR_ARG (exp, 2);
4167 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4169 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4170 enum machine_mode insn_mode
4171 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4173 len1 = c_strlen (arg1, 1);
4174 len2 = c_strlen (arg2, 1);
4177 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4179 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4181 /* If we don't have a constant length for the first, use the length
4182 of the second, if we know it. We don't require a constant for
4183 this case; some cost analysis could be done if both are available
4184 but neither is constant. For now, assume they're equally cheap,
4185 unless one has side effects. If both strings have constant lengths,
4192 else if (TREE_SIDE_EFFECTS (len1))
4194 else if (TREE_SIDE_EFFECTS (len2))
4196 else if (TREE_CODE (len1) != INTEGER_CST)
4198 else if (TREE_CODE (len2) != INTEGER_CST)
4200 else if (tree_int_cst_lt (len1, len2))
4205 /* If both arguments have side effects, we cannot optimize. */
4206 if (!len || TREE_SIDE_EFFECTS (len))
4209 /* The actual new length parameter is MIN(len,arg3). */
4210 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4211 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4213 /* If we don't have POINTER_TYPE, call the function. */
4214 if (arg1_align == 0 || arg2_align == 0)
4217 /* Make a place to write the result of the instruction. */
4220 && REG_P (result) && GET_MODE (result) == insn_mode
4221 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4222 result = gen_reg_rtx (insn_mode);
4224 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4225 arg1 = builtin_save_expr (arg1);
4226 arg2 = builtin_save_expr (arg2);
4227 len = builtin_save_expr (len);
4229 arg1_rtx = get_memory_rtx (arg1, len);
4230 arg2_rtx = get_memory_rtx (arg2, len);
4231 arg3_rtx = expand_normal (len);
4232 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4233 GEN_INT (MIN (arg1_align, arg2_align)));
4238 /* Return the value in the proper mode for this function. */
4239 mode = TYPE_MODE (TREE_TYPE (exp));
4240 if (GET_MODE (result) == mode)
4243 return convert_to_mode (mode, result, 0);
4244 convert_move (target, result, 0);
4248 /* Expand the library call ourselves using a stabilized argument
4249 list to avoid re-evaluating the function's arguments twice. */
4250 fndecl = get_callee_fndecl (exp);
4251 fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
4252 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4253 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4254 return expand_call (fn, target, target == const0_rtx);
4260 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4261 if that's convenient. */
4264 expand_builtin_saveregs (void)
4268 /* Don't do __builtin_saveregs more than once in a function.
4269 Save the result of the first call and reuse it. */
4270 if (saveregs_value != 0)
4271 return saveregs_value;
4273 /* When this function is called, it means that registers must be
4274 saved on entry to this function. So we migrate the call to the
4275 first insn of this function. */
4279 /* Do whatever the machine needs done in this case. */
4280 val = targetm.calls.expand_builtin_saveregs ();
4285 saveregs_value = val;
4287 /* Put the insns after the NOTE that starts the function. If this
4288 is inside a start_sequence, make the outer-level insn chain current, so
4289 the code is placed at the start of the function. */
4290 push_topmost_sequence ();
4291 emit_insn_after (seq, entry_of_function ());
4292 pop_topmost_sequence ();
4297 /* __builtin_args_info (N) returns word N of the arg space info
4298 for the current function. The number and meanings of words
4299 is controlled by the definition of CUMULATIVE_ARGS. */
4302 expand_builtin_args_info (tree exp)
4304 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4305 int *word_ptr = (int *) &crtl->args.info;
4307 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4309 if (call_expr_nargs (exp) != 0)
4311 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4312 error ("argument of %<__builtin_args_info%> must be constant");
4315 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4317 if (wordnum < 0 || wordnum >= nwords)
4318 error ("argument of %<__builtin_args_info%> out of range");
4320 return GEN_INT (word_ptr[wordnum]);
4324 error ("missing argument in %<__builtin_args_info%>");
4329 /* Expand a call to __builtin_next_arg. */
4332 expand_builtin_next_arg (void)
4334 /* Checking arguments is already done in fold_builtin_next_arg
4335 that must be called before this function. */
4336 return expand_binop (ptr_mode, add_optab,
4337 crtl->args.internal_arg_pointer,
4338 crtl->args.arg_offset_rtx,
4339 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4342 /* Make it easier for the backends by protecting the valist argument
4343 from multiple evaluations. */
4346 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4348 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4350 gcc_assert (vatype != NULL_TREE);
4352 if (TREE_CODE (vatype) == ARRAY_TYPE)
4354 if (TREE_SIDE_EFFECTS (valist))
4355 valist = save_expr (valist);
4357 /* For this case, the backends will be expecting a pointer to
4358 vatype, but it's possible we've actually been given an array
4359 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4361 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4363 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4364 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4373 if (! TREE_SIDE_EFFECTS (valist))
4376 pt = build_pointer_type (vatype);
4377 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4378 TREE_SIDE_EFFECTS (valist) = 1;
4381 if (TREE_SIDE_EFFECTS (valist))
4382 valist = save_expr (valist);
4383 valist = build_fold_indirect_ref_loc (loc, valist);
4389 /* The "standard" definition of va_list is void*. */
4392 std_build_builtin_va_list (void)
4394 return ptr_type_node;
4397 /* The "standard" abi va_list is va_list_type_node. */
4400 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4402 return va_list_type_node;
4405 /* The "standard" type of va_list is va_list_type_node. */
4408 std_canonical_va_list_type (tree type)
4412 if (INDIRECT_REF_P (type))
4413 type = TREE_TYPE (type);
4414 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4415 type = TREE_TYPE (type);
4416 wtype = va_list_type_node;
4418 /* Treat structure va_list types. */
4419 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4420 htype = TREE_TYPE (htype);
4421 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4423 /* If va_list is an array type, the argument may have decayed
4424 to a pointer type, e.g. by being passed to another function.
4425 In that case, unwrap both types so that we can compare the
4426 underlying records. */
4427 if (TREE_CODE (htype) == ARRAY_TYPE
4428 || POINTER_TYPE_P (htype))
4430 wtype = TREE_TYPE (wtype);
4431 htype = TREE_TYPE (htype);
4434 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4435 return va_list_type_node;
4440 /* The "standard" implementation of va_start: just assign `nextarg' to
4444 std_expand_builtin_va_start (tree valist, rtx nextarg)
4446 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4447 convert_move (va_r, nextarg, 0);
4450 /* Expand EXP, a call to __builtin_va_start. */
4453 expand_builtin_va_start (tree exp)
4457 location_t loc = EXPR_LOCATION (exp);
4459 if (call_expr_nargs (exp) < 2)
4461 error_at (loc, "too few arguments to function %<va_start%>");
4465 if (fold_builtin_next_arg (exp, true))
4468 nextarg = expand_builtin_next_arg ();
4469 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4471 if (targetm.expand_builtin_va_start)
4472 targetm.expand_builtin_va_start (valist, nextarg);
4474 std_expand_builtin_va_start (valist, nextarg);
4479 /* The "standard" implementation of va_arg: read the value from the
4480 current (padded) address and increment by the (padded) size. */
4483 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4486 tree addr, t, type_size, rounded_size, valist_tmp;
4487 unsigned HOST_WIDE_INT align, boundary;
4490 #ifdef ARGS_GROW_DOWNWARD
4491 /* All of the alignment and movement below is for args-grow-up machines.
4492 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4493 implement their own specialized gimplify_va_arg_expr routines. */
4497 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4499 type = build_pointer_type (type);
4501 align = PARM_BOUNDARY / BITS_PER_UNIT;
4502 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4504 /* When we align parameter on stack for caller, if the parameter
4505 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4506 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4507 here with caller. */
4508 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4509 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4511 boundary /= BITS_PER_UNIT;
4513 /* Hoist the valist value into a temporary for the moment. */
4514 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4516 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4517 requires greater alignment, we must perform dynamic alignment. */
4518 if (boundary > align
4519 && !integer_zerop (TYPE_SIZE (type)))
4521 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4522 fold_build2 (POINTER_PLUS_EXPR,
4524 valist_tmp, size_int (boundary - 1)));
4525 gimplify_and_add (t, pre_p);
4527 t = fold_convert (sizetype, valist_tmp);
4528 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4529 fold_convert (TREE_TYPE (valist),
4530 fold_build2 (BIT_AND_EXPR, sizetype, t,
4531 size_int (-boundary))));
4532 gimplify_and_add (t, pre_p);
4537 /* If the actual alignment is less than the alignment of the type,
4538 adjust the type accordingly so that we don't assume strict alignment
4539 when dereferencing the pointer. */
4540 boundary *= BITS_PER_UNIT;
4541 if (boundary < TYPE_ALIGN (type))
4543 type = build_variant_type_copy (type);
4544 TYPE_ALIGN (type) = boundary;
4547 /* Compute the rounded size of the type. */
4548 type_size = size_in_bytes (type);
4549 rounded_size = round_up (type_size, align);
4551 /* Reduce rounded_size so it's sharable with the postqueue. */
4552 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4556 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4558 /* Small args are padded downward. */
4559 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4560 rounded_size, size_int (align));
4561 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4562 size_binop (MINUS_EXPR, rounded_size, type_size));
4563 addr = fold_build2 (POINTER_PLUS_EXPR,
4564 TREE_TYPE (addr), addr, t);
4567 /* Compute new value for AP. */
4568 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4569 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4570 gimplify_and_add (t, pre_p);
4572 addr = fold_convert (build_pointer_type (type), addr);
4575 addr = build_va_arg_indirect_ref (addr);
4577 return build_va_arg_indirect_ref (addr);
4580 /* Build an indirect-ref expression over the given TREE, which represents a
4581 piece of a va_arg() expansion. */
4583 build_va_arg_indirect_ref (tree addr)
4585 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4587 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4593 /* Return a dummy expression of type TYPE in order to keep going after an
4597 dummy_object (tree type)
4599 tree t = build_int_cst (build_pointer_type (type), 0);
4600 return build1 (INDIRECT_REF, type, t);
4603 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4604 builtin function, but a very special sort of operator. */
4606 enum gimplify_status
4607 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4609 tree promoted_type, have_va_type;
4610 tree valist = TREE_OPERAND (*expr_p, 0);
4611 tree type = TREE_TYPE (*expr_p);
4613 location_t loc = EXPR_LOCATION (*expr_p);
4615 /* Verify that valist is of the proper type. */
4616 have_va_type = TREE_TYPE (valist);
4617 if (have_va_type == error_mark_node)
4619 have_va_type = targetm.canonical_va_list_type (have_va_type);
4621 if (have_va_type == NULL_TREE)
4623 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4627 /* Generate a diagnostic for requesting data of a type that cannot
4628 be passed through `...' due to type promotion at the call site. */
4629 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4632 static bool gave_help;
4635 /* Unfortunately, this is merely undefined, rather than a constraint
4636 violation, so we cannot make this an error. If this call is never
4637 executed, the program is still strictly conforming. */
4638 warned = warning_at (loc, 0,
4639 "%qT is promoted to %qT when passed through %<...%>",
4640 type, promoted_type);
4641 if (!gave_help && warned)
4644 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4645 promoted_type, type);
4648 /* We can, however, treat "undefined" any way we please.
4649 Call abort to encourage the user to fix the program. */
4651 inform (loc, "if this code is reached, the program will abort");
4652 /* Before the abort, allow the evaluation of the va_list
4653 expression to exit or longjmp. */
4654 gimplify_and_add (valist, pre_p);
4655 t = build_call_expr_loc (loc,
4656 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4657 gimplify_and_add (t, pre_p);
4659 /* This is dead code, but go ahead and finish so that the
4660 mode of the result comes out right. */
4661 *expr_p = dummy_object (type);
4666 /* Make it easier for the backends by protecting the valist argument
4667 from multiple evaluations. */
4668 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4670 /* For this case, the backends will be expecting a pointer to
4671 TREE_TYPE (abi), but it's possible we've
4672 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4674 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4676 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4677 valist = fold_convert_loc (loc, p1,
4678 build_fold_addr_expr_loc (loc, valist));
4681 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4684 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4686 if (!targetm.gimplify_va_arg_expr)
4687 /* FIXME: Once most targets are converted we should merely
4688 assert this is non-null. */
4691 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4696 /* Expand EXP, a call to __builtin_va_end. */
4699 expand_builtin_va_end (tree exp)
4701 tree valist = CALL_EXPR_ARG (exp, 0);
4703 /* Evaluate for side effects, if needed. I hate macros that don't
4705 if (TREE_SIDE_EFFECTS (valist))
4706 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4711 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4712 builtin rather than just as an assignment in stdarg.h because of the
4713 nastiness of array-type va_list types. */
4716 expand_builtin_va_copy (tree exp)
4719 location_t loc = EXPR_LOCATION (exp);
4721 dst = CALL_EXPR_ARG (exp, 0);
4722 src = CALL_EXPR_ARG (exp, 1);
4724 dst = stabilize_va_list_loc (loc, dst, 1);
4725 src = stabilize_va_list_loc (loc, src, 0);
4727 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4729 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4731 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4732 TREE_SIDE_EFFECTS (t) = 1;
4733 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4737 rtx dstb, srcb, size;
4739 /* Evaluate to pointers. */
4740 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4741 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4742 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4743 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4745 dstb = convert_memory_address (Pmode, dstb);
4746 srcb = convert_memory_address (Pmode, srcb);
4748 /* "Dereference" to BLKmode memories. */
4749 dstb = gen_rtx_MEM (BLKmode, dstb);
4750 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4751 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4752 srcb = gen_rtx_MEM (BLKmode, srcb);
4753 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4754 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4757 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4763 /* Expand a call to one of the builtin functions __builtin_frame_address or
4764 __builtin_return_address. */
4767 expand_builtin_frame_address (tree fndecl, tree exp)
4769 /* The argument must be a nonnegative integer constant.
4770 It counts the number of frames to scan up the stack.
4771 The value is the return address saved in that frame. */
4772 if (call_expr_nargs (exp) == 0)
4773 /* Warning about missing arg was already issued. */
4775 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4777 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4778 error ("invalid argument to %<__builtin_frame_address%>");
4780 error ("invalid argument to %<__builtin_return_address%>");
4786 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4787 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4789 /* Some ports cannot access arbitrary stack frames. */
4792 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4793 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4795 warning (0, "unsupported argument to %<__builtin_return_address%>");
4799 /* For __builtin_frame_address, return what we've got. */
4800 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4804 && ! CONSTANT_P (tem))
4805 tem = copy_to_mode_reg (Pmode, tem);
4810 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4811 we failed and the caller should emit a normal call, otherwise try to get
4812 the result in TARGET, if convenient. */
4815 expand_builtin_alloca (tree exp, rtx target)
4820 /* Emit normal call if marked not-inlineable. */
4821 if (CALL_CANNOT_INLINE_P (exp))
4824 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4827 /* Compute the argument. */
4828 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4830 /* Allocate the desired space. */
4831 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4832 result = convert_memory_address (ptr_mode, result);
4837 /* Expand a call to a bswap builtin with argument ARG0. MODE
4838 is the mode to expand with. */
4841 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4843 enum machine_mode mode;
4847 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4850 arg = CALL_EXPR_ARG (exp, 0);
4851 mode = TYPE_MODE (TREE_TYPE (arg));
4852 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4854 target = expand_unop (mode, bswap_optab, op0, target, 1);
4856 gcc_assert (target);
4858 return convert_to_mode (mode, target, 0);
4861 /* Expand a call to a unary builtin in EXP.
4862 Return NULL_RTX if a normal call should be emitted rather than expanding the
4863 function in-line. If convenient, the result should be placed in TARGET.
4864 SUBTARGET may be used as the target for computing one of EXP's operands. */
4867 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4868 rtx subtarget, optab op_optab)
4872 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4875 /* Compute the argument. */
4876 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4877 VOIDmode, EXPAND_NORMAL);
4878 /* Compute op, into TARGET if possible.
4879 Set TARGET to wherever the result comes back. */
4880 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4881 op_optab, op0, target, 1);
4882 gcc_assert (target);
4884 return convert_to_mode (target_mode, target, 0);
4887 /* Expand a call to __builtin_expect. We just return our argument
4888 as the builtin_expect semantic should've been already executed by
4889 tree branch prediction pass. */
4892 expand_builtin_expect (tree exp, rtx target)
4896 if (call_expr_nargs (exp) < 2)
4898 arg = CALL_EXPR_ARG (exp, 0);
4900 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4901 /* When guessing was done, the hints should be already stripped away. */
4902 gcc_assert (!flag_guess_branch_prob
4903 || optimize == 0 || errorcount || sorrycount);
4908 expand_builtin_trap (void)
4912 emit_insn (gen_trap ());
4915 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4919 /* Expand a call to __builtin_unreachable. We do nothing except emit
4920 a barrier saying that control flow will not pass here.
4922 It is the responsibility of the program being compiled to ensure
4923 that control flow does never reach __builtin_unreachable. */
4925 expand_builtin_unreachable (void)
4930 /* Expand EXP, a call to fabs, fabsf or fabsl.
4931 Return NULL_RTX if a normal call should be emitted rather than expanding
4932 the function inline. If convenient, the result should be placed
4933 in TARGET. SUBTARGET may be used as the target for computing
4937 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4939 enum machine_mode mode;
4943 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4946 arg = CALL_EXPR_ARG (exp, 0);
4947 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4948 mode = TYPE_MODE (TREE_TYPE (arg));
4949 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4950 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4953 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4954 Return NULL is a normal call should be emitted rather than expanding the
4955 function inline. If convenient, the result should be placed in TARGET.
4956 SUBTARGET may be used as the target for computing the operand. */
4959 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4964 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4967 arg = CALL_EXPR_ARG (exp, 0);
4968 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4970 arg = CALL_EXPR_ARG (exp, 1);
4971 op1 = expand_normal (arg);
4973 return expand_copysign (op0, op1, target);
4976 /* Create a new constant string literal and return a char* pointer to it.
4977 The STRING_CST value is the LEN characters at STR. */
4979 build_string_literal (int len, const char *str)
4981 tree t, elem, index, type;
4983 t = build_string (len, str);
4984 elem = build_type_variant (char_type_node, 1, 0);
4985 index = build_index_type (size_int (len - 1));
4986 type = build_array_type (elem, index);
4987 TREE_TYPE (t) = type;
4988 TREE_CONSTANT (t) = 1;
4989 TREE_READONLY (t) = 1;
4990 TREE_STATIC (t) = 1;
4992 type = build_pointer_type (elem);
4993 t = build1 (ADDR_EXPR, type,
4994 build4 (ARRAY_REF, elem,
4995 t, integer_zero_node, NULL_TREE, NULL_TREE));
4999 /* Expand a call to either the entry or exit function profiler. */
5002 expand_builtin_profile_func (bool exitp)
5004 rtx this_rtx, which;
5006 this_rtx = DECL_RTL (current_function_decl);
5007 gcc_assert (MEM_P (this_rtx));
5008 this_rtx = XEXP (this_rtx, 0);
5011 which = profile_function_exit_libfunc;
5013 which = profile_function_entry_libfunc;
5015 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5016 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5023 /* Expand a call to __builtin___clear_cache. */
5026 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5028 #ifndef HAVE_clear_cache
5029 #ifdef CLEAR_INSN_CACHE
5030 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5031 does something. Just do the default expansion to a call to
5035 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5036 does nothing. There is no need to call it. Do nothing. */
5038 #endif /* CLEAR_INSN_CACHE */
5040 /* We have a "clear_cache" insn, and it will handle everything. */
5042 rtx begin_rtx, end_rtx;
5043 enum insn_code icode;
5045 /* We must not expand to a library call. If we did, any
5046 fallback library function in libgcc that might contain a call to
5047 __builtin___clear_cache() would recurse infinitely. */
5048 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5050 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5054 if (HAVE_clear_cache)
5056 icode = CODE_FOR_clear_cache;
5058 begin = CALL_EXPR_ARG (exp, 0);
5059 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5060 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5061 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5062 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5064 end = CALL_EXPR_ARG (exp, 1);
5065 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5066 end_rtx = convert_memory_address (Pmode, end_rtx);
5067 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5068 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5070 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5073 #endif /* HAVE_clear_cache */
5076 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5079 round_trampoline_addr (rtx tramp)
5081 rtx temp, addend, mask;
5083 /* If we don't need too much alignment, we'll have been guaranteed
5084 proper alignment by get_trampoline_type. */
5085 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5088 /* Round address up to desired boundary. */
5089 temp = gen_reg_rtx (Pmode);
5090 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5091 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5093 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5094 temp, 0, OPTAB_LIB_WIDEN);
5095 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5096 temp, 0, OPTAB_LIB_WIDEN);
5102 expand_builtin_init_trampoline (tree exp)
5104 tree t_tramp, t_func, t_chain;
5105 rtx m_tramp, r_tramp, r_chain, tmp;
5107 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5108 POINTER_TYPE, VOID_TYPE))
5111 t_tramp = CALL_EXPR_ARG (exp, 0);
5112 t_func = CALL_EXPR_ARG (exp, 1);
5113 t_chain = CALL_EXPR_ARG (exp, 2);
5115 r_tramp = expand_normal (t_tramp);
5116 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5117 MEM_NOTRAP_P (m_tramp) = 1;
5119 /* The TRAMP argument should be the address of a field within the
5120 local function's FRAME decl. Let's see if we can fill in the
5121 to fill in the MEM_ATTRs for this memory. */
5122 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5123 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
5126 tmp = round_trampoline_addr (r_tramp);
5129 m_tramp = change_address (m_tramp, BLKmode, tmp);
5130 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5131 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5134 /* The FUNC argument should be the address of the nested function.
5135 Extract the actual function decl to pass to the hook. */
5136 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5137 t_func = TREE_OPERAND (t_func, 0);
5138 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5140 r_chain = expand_normal (t_chain);
5142 /* Generate insns to initialize the trampoline. */
5143 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5145 trampolines_created = 1;
5150 expand_builtin_adjust_trampoline (tree exp)
5154 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5157 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5158 tramp = round_trampoline_addr (tramp);
5159 if (targetm.calls.trampoline_adjust_address)
5160 tramp = targetm.calls.trampoline_adjust_address (tramp);
5165 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5166 function. The function first checks whether the back end provides
5167 an insn to implement signbit for the respective mode. If not, it
5168 checks whether the floating point format of the value is such that
5169 the sign bit can be extracted. If that is not the case, the
5170 function returns NULL_RTX to indicate that a normal call should be
5171 emitted rather than expanding the function in-line. EXP is the
5172 expression that is a call to the builtin function; if convenient,
5173 the result should be placed in TARGET. */
5175 expand_builtin_signbit (tree exp, rtx target)
5177 const struct real_format *fmt;
5178 enum machine_mode fmode, imode, rmode;
5179 HOST_WIDE_INT hi, lo;
5182 enum insn_code icode;
5184 location_t loc = EXPR_LOCATION (exp);
5186 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5189 arg = CALL_EXPR_ARG (exp, 0);
5190 fmode = TYPE_MODE (TREE_TYPE (arg));
5191 rmode = TYPE_MODE (TREE_TYPE (exp));
5192 fmt = REAL_MODE_FORMAT (fmode);
5194 arg = builtin_save_expr (arg);
5196 /* Expand the argument yielding a RTX expression. */
5197 temp = expand_normal (arg);
5199 /* Check if the back end provides an insn that handles signbit for the
5201 icode = signbit_optab->handlers [(int) fmode].insn_code;
5202 if (icode != CODE_FOR_nothing)
5204 rtx last = get_last_insn ();
5205 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5206 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5208 delete_insns_since (last);
5211 /* For floating point formats without a sign bit, implement signbit
5213 bitpos = fmt->signbit_ro;
5216 /* But we can't do this if the format supports signed zero. */
5217 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5220 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5221 build_real (TREE_TYPE (arg), dconst0));
5222 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5225 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5227 imode = int_mode_for_mode (fmode);
5228 if (imode == BLKmode)
5230 temp = gen_lowpart (imode, temp);
5235 /* Handle targets with different FP word orders. */
5236 if (FLOAT_WORDS_BIG_ENDIAN)
5237 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5239 word = bitpos / BITS_PER_WORD;
5240 temp = operand_subword_force (temp, word, fmode);
5241 bitpos = bitpos % BITS_PER_WORD;
5244 /* Force the intermediate word_mode (or narrower) result into a
5245 register. This avoids attempting to create paradoxical SUBREGs
5246 of floating point modes below. */
5247 temp = force_reg (imode, temp);
5249 /* If the bitpos is within the "result mode" lowpart, the operation
5250 can be implement with a single bitwise AND. Otherwise, we need
5251 a right shift and an AND. */
5253 if (bitpos < GET_MODE_BITSIZE (rmode))
5255 if (bitpos < HOST_BITS_PER_WIDE_INT)
5258 lo = (HOST_WIDE_INT) 1 << bitpos;
5262 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5266 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5267 temp = gen_lowpart (rmode, temp);
5268 temp = expand_binop (rmode, and_optab, temp,
5269 immed_double_const (lo, hi, rmode),
5270 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5274 /* Perform a logical right shift to place the signbit in the least
5275 significant bit, then truncate the result to the desired mode
5276 and mask just this bit. */
5277 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5278 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5279 temp = gen_lowpart (rmode, temp);
5280 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5281 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5287 /* Expand fork or exec calls. TARGET is the desired target of the
5288 call. EXP is the call. FN is the
5289 identificator of the actual function. IGNORE is nonzero if the
5290 value is to be ignored. */
5293 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5298 /* If we are not profiling, just call the function. */
5299 if (!profile_arc_flag)
5302 /* Otherwise call the wrapper. This should be equivalent for the rest of
5303 compiler, so the code does not diverge, and the wrapper may run the
5304 code necessary for keeping the profiling sane. */
5306 switch (DECL_FUNCTION_CODE (fn))
5309 id = get_identifier ("__gcov_fork");
5312 case BUILT_IN_EXECL:
5313 id = get_identifier ("__gcov_execl");
5316 case BUILT_IN_EXECV:
5317 id = get_identifier ("__gcov_execv");
5320 case BUILT_IN_EXECLP:
5321 id = get_identifier ("__gcov_execlp");
5324 case BUILT_IN_EXECLE:
5325 id = get_identifier ("__gcov_execle");
5328 case BUILT_IN_EXECVP:
5329 id = get_identifier ("__gcov_execvp");
5332 case BUILT_IN_EXECVE:
5333 id = get_identifier ("__gcov_execve");
5340 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5341 FUNCTION_DECL, id, TREE_TYPE (fn));
5342 DECL_EXTERNAL (decl) = 1;
5343 TREE_PUBLIC (decl) = 1;
5344 DECL_ARTIFICIAL (decl) = 1;
5345 TREE_NOTHROW (decl) = 1;
5346 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5347 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5348 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5349 return expand_call (call, target, ignore);
5354 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5355 the pointer in these functions is void*, the tree optimizers may remove
5356 casts. The mode computed in expand_builtin isn't reliable either, due
5357 to __sync_bool_compare_and_swap.
5359 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5360 group of builtins. This gives us log2 of the mode size. */
5362 static inline enum machine_mode
5363 get_builtin_sync_mode (int fcode_diff)
5365 /* The size is not negotiable, so ask not to get BLKmode in return
5366 if the target indicates that a smaller size would be better. */
5367 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5370 /* Expand the memory expression LOC and return the appropriate memory operand
5371 for the builtin_sync operations. */
5374 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5378 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5379 addr = convert_memory_address (Pmode, addr);
5381 /* Note that we explicitly do not want any alias information for this
5382 memory, so that we kill all other live memories. Otherwise we don't
5383 satisfy the full barrier semantics of the intrinsic. */
5384 mem = validize_mem (gen_rtx_MEM (mode, addr));
5386 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5387 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5388 MEM_VOLATILE_P (mem) = 1;
5393 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5394 EXP is the CALL_EXPR. CODE is the rtx code
5395 that corresponds to the arithmetic or logical operation from the name;
5396 an exception here is that NOT actually means NAND. TARGET is an optional
5397 place for us to store the results; AFTER is true if this is the
5398 fetch_and_xxx form. IGNORE is true if we don't actually care about
5399 the result of the operation at all. */
5402 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5403 enum rtx_code code, bool after,
5404 rtx target, bool ignore)
5407 enum machine_mode old_mode;
5408 location_t loc = EXPR_LOCATION (exp);
5410 if (code == NOT && warn_sync_nand)
5412 tree fndecl = get_callee_fndecl (exp);
5413 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5415 static bool warned_f_a_n, warned_n_a_f;
5419 case BUILT_IN_FETCH_AND_NAND_1:
5420 case BUILT_IN_FETCH_AND_NAND_2:
5421 case BUILT_IN_FETCH_AND_NAND_4:
5422 case BUILT_IN_FETCH_AND_NAND_8:
5423 case BUILT_IN_FETCH_AND_NAND_16:
5428 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5429 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5430 warned_f_a_n = true;
5433 case BUILT_IN_NAND_AND_FETCH_1:
5434 case BUILT_IN_NAND_AND_FETCH_2:
5435 case BUILT_IN_NAND_AND_FETCH_4:
5436 case BUILT_IN_NAND_AND_FETCH_8:
5437 case BUILT_IN_NAND_AND_FETCH_16:
5442 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5443 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5444 warned_n_a_f = true;
5452 /* Expand the operands. */
5453 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5455 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5456 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5457 of CONST_INTs, where we know the old_mode only from the call argument. */
5458 old_mode = GET_MODE (val);
5459 if (old_mode == VOIDmode)
5460 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5461 val = convert_modes (mode, old_mode, val, 1);
5464 return expand_sync_operation (mem, val, code);
5466 return expand_sync_fetch_operation (mem, val, code, after, target);
5469 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5470 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5471 true if this is the boolean form. TARGET is a place for us to store the
5472 results; this is NOT optional if IS_BOOL is true. */
5475 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5476 bool is_bool, rtx target)
5478 rtx old_val, new_val, mem;
5479 enum machine_mode old_mode;
5481 /* Expand the operands. */
5482 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5485 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5486 mode, EXPAND_NORMAL);
5487 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5488 of CONST_INTs, where we know the old_mode only from the call argument. */
5489 old_mode = GET_MODE (old_val);
5490 if (old_mode == VOIDmode)
5491 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5492 old_val = convert_modes (mode, old_mode, old_val, 1);
5494 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5495 mode, EXPAND_NORMAL);
5496 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5497 of CONST_INTs, where we know the old_mode only from the call argument. */
5498 old_mode = GET_MODE (new_val);
5499 if (old_mode == VOIDmode)
5500 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5501 new_val = convert_modes (mode, old_mode, new_val, 1);
5504 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5506 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5509 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5510 general form is actually an atomic exchange, and some targets only
5511 support a reduced form with the second argument being a constant 1.
5512 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5516 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5520 enum machine_mode old_mode;
5522 /* Expand the operands. */
5523 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5524 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5525 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5526 of CONST_INTs, where we know the old_mode only from the call argument. */
5527 old_mode = GET_MODE (val);
5528 if (old_mode == VOIDmode)
5529 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5530 val = convert_modes (mode, old_mode, val, 1);
5532 return expand_sync_lock_test_and_set (mem, val, target);
5535 /* Expand the __sync_synchronize intrinsic. */
5538 expand_builtin_synchronize (void)
5541 VEC (tree, gc) *v_clobbers;
5543 #ifdef HAVE_memory_barrier
5544 if (HAVE_memory_barrier)
5546 emit_insn (gen_memory_barrier ());
5551 if (synchronize_libfunc != NULL_RTX)
5553 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5557 /* If no explicit memory barrier instruction is available, create an
5558 empty asm stmt with a memory clobber. */
5559 v_clobbers = VEC_alloc (tree, gc, 1);
5560 VEC_quick_push (tree, v_clobbers,
5561 tree_cons (NULL, build_string (6, "memory"), NULL));
5562 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5563 gimple_asm_set_volatile (x, true);
5564 expand_asm_stmt (x);
5567 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5570 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5572 enum insn_code icode;
5574 rtx val = const0_rtx;
5576 /* Expand the operands. */
5577 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5579 /* If there is an explicit operation in the md file, use it. */
5580 icode = sync_lock_release[mode];
5581 if (icode != CODE_FOR_nothing)
5583 if (!insn_data[icode].operand[1].predicate (val, mode))
5584 val = force_reg (mode, val);
5586 insn = GEN_FCN (icode) (mem, val);
5594 /* Otherwise we can implement this operation by emitting a barrier
5595 followed by a store of zero. */
5596 expand_builtin_synchronize ();
5597 emit_move_insn (mem, val);
5600 /* Expand an expression EXP that calls a built-in function,
5601 with result going to TARGET if that's convenient
5602 (and in mode MODE if that's convenient).
5603 SUBTARGET may be used as the target for computing one of EXP's operands.
5604 IGNORE is nonzero if the value is to be ignored. */
5607 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5610 tree fndecl = get_callee_fndecl (exp);
5611 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5612 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5614 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5615 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5617 /* When not optimizing, generate calls to library functions for a certain
5620 && !called_as_built_in (fndecl)
5621 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5622 && fcode != BUILT_IN_ALLOCA
5623 && fcode != BUILT_IN_FREE)
5624 return expand_call (exp, target, ignore);
5626 /* The built-in function expanders test for target == const0_rtx
5627 to determine whether the function's result will be ignored. */
5629 target = const0_rtx;
5631 /* If the result of a pure or const built-in function is ignored, and
5632 none of its arguments are volatile, we can avoid expanding the
5633 built-in call and just evaluate the arguments for side-effects. */
5634 if (target == const0_rtx
5635 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5637 bool volatilep = false;
5639 call_expr_arg_iterator iter;
5641 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5642 if (TREE_THIS_VOLATILE (arg))
5650 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5651 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5658 CASE_FLT_FN (BUILT_IN_FABS):
5659 target = expand_builtin_fabs (exp, target, subtarget);
5664 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5665 target = expand_builtin_copysign (exp, target, subtarget);
5670 /* Just do a normal library call if we were unable to fold
5672 CASE_FLT_FN (BUILT_IN_CABS):
5675 CASE_FLT_FN (BUILT_IN_EXP):
5676 CASE_FLT_FN (BUILT_IN_EXP10):
5677 CASE_FLT_FN (BUILT_IN_POW10):
5678 CASE_FLT_FN (BUILT_IN_EXP2):
5679 CASE_FLT_FN (BUILT_IN_EXPM1):
5680 CASE_FLT_FN (BUILT_IN_LOGB):
5681 CASE_FLT_FN (BUILT_IN_LOG):
5682 CASE_FLT_FN (BUILT_IN_LOG10):
5683 CASE_FLT_FN (BUILT_IN_LOG2):
5684 CASE_FLT_FN (BUILT_IN_LOG1P):
5685 CASE_FLT_FN (BUILT_IN_TAN):
5686 CASE_FLT_FN (BUILT_IN_ASIN):
5687 CASE_FLT_FN (BUILT_IN_ACOS):
5688 CASE_FLT_FN (BUILT_IN_ATAN):
5689 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5690 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5691 because of possible accuracy problems. */
5692 if (! flag_unsafe_math_optimizations)
5694 CASE_FLT_FN (BUILT_IN_SQRT):
5695 CASE_FLT_FN (BUILT_IN_FLOOR):
5696 CASE_FLT_FN (BUILT_IN_CEIL):
5697 CASE_FLT_FN (BUILT_IN_TRUNC):
5698 CASE_FLT_FN (BUILT_IN_ROUND):
5699 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5700 CASE_FLT_FN (BUILT_IN_RINT):
5701 target = expand_builtin_mathfn (exp, target, subtarget);
5706 CASE_FLT_FN (BUILT_IN_ILOGB):
5707 if (! flag_unsafe_math_optimizations)
5709 CASE_FLT_FN (BUILT_IN_ISINF):
5710 CASE_FLT_FN (BUILT_IN_FINITE):
5711 case BUILT_IN_ISFINITE:
5712 case BUILT_IN_ISNORMAL:
5713 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5718 CASE_FLT_FN (BUILT_IN_LCEIL):
5719 CASE_FLT_FN (BUILT_IN_LLCEIL):
5720 CASE_FLT_FN (BUILT_IN_LFLOOR):
5721 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5722 target = expand_builtin_int_roundingfn (exp, target);
5727 CASE_FLT_FN (BUILT_IN_LRINT):
5728 CASE_FLT_FN (BUILT_IN_LLRINT):
5729 CASE_FLT_FN (BUILT_IN_LROUND):
5730 CASE_FLT_FN (BUILT_IN_LLROUND):
5731 target = expand_builtin_int_roundingfn_2 (exp, target);
5736 CASE_FLT_FN (BUILT_IN_POW):
5737 target = expand_builtin_pow (exp, target, subtarget);
5742 CASE_FLT_FN (BUILT_IN_POWI):
5743 target = expand_builtin_powi (exp, target, subtarget);
5748 CASE_FLT_FN (BUILT_IN_ATAN2):
5749 CASE_FLT_FN (BUILT_IN_LDEXP):
5750 CASE_FLT_FN (BUILT_IN_SCALB):
5751 CASE_FLT_FN (BUILT_IN_SCALBN):
5752 CASE_FLT_FN (BUILT_IN_SCALBLN):
5753 if (! flag_unsafe_math_optimizations)
5756 CASE_FLT_FN (BUILT_IN_FMOD):
5757 CASE_FLT_FN (BUILT_IN_REMAINDER):
5758 CASE_FLT_FN (BUILT_IN_DREM):
5759 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5764 CASE_FLT_FN (BUILT_IN_CEXPI):
5765 target = expand_builtin_cexpi (exp, target, subtarget);
5766 gcc_assert (target);
5769 CASE_FLT_FN (BUILT_IN_SIN):
5770 CASE_FLT_FN (BUILT_IN_COS):
5771 if (! flag_unsafe_math_optimizations)
5773 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5778 CASE_FLT_FN (BUILT_IN_SINCOS):
5779 if (! flag_unsafe_math_optimizations)
5781 target = expand_builtin_sincos (exp);
5786 case BUILT_IN_APPLY_ARGS:
5787 return expand_builtin_apply_args ();
5789 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5790 FUNCTION with a copy of the parameters described by
5791 ARGUMENTS, and ARGSIZE. It returns a block of memory
5792 allocated on the stack into which is stored all the registers
5793 that might possibly be used for returning the result of a
5794 function. ARGUMENTS is the value returned by
5795 __builtin_apply_args. ARGSIZE is the number of bytes of
5796 arguments that must be copied. ??? How should this value be
5797 computed? We'll also need a safe worst case value for varargs
5799 case BUILT_IN_APPLY:
5800 if (!validate_arglist (exp, POINTER_TYPE,
5801 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5802 && !validate_arglist (exp, REFERENCE_TYPE,
5803 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5809 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5810 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5811 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5813 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5816 /* __builtin_return (RESULT) causes the function to return the
5817 value described by RESULT. RESULT is address of the block of
5818 memory returned by __builtin_apply. */
5819 case BUILT_IN_RETURN:
5820 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5821 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5824 case BUILT_IN_SAVEREGS:
5825 return expand_builtin_saveregs ();
5827 case BUILT_IN_ARGS_INFO:
5828 return expand_builtin_args_info (exp);
5830 case BUILT_IN_VA_ARG_PACK:
5831 /* All valid uses of __builtin_va_arg_pack () are removed during
5833 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5836 case BUILT_IN_VA_ARG_PACK_LEN:
5837 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5839 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5842 /* Return the address of the first anonymous stack arg. */
5843 case BUILT_IN_NEXT_ARG:
5844 if (fold_builtin_next_arg (exp, false))
5846 return expand_builtin_next_arg ();
5848 case BUILT_IN_CLEAR_CACHE:
5849 target = expand_builtin___clear_cache (exp);
5854 case BUILT_IN_CLASSIFY_TYPE:
5855 return expand_builtin_classify_type (exp);
5857 case BUILT_IN_CONSTANT_P:
5860 case BUILT_IN_FRAME_ADDRESS:
5861 case BUILT_IN_RETURN_ADDRESS:
5862 return expand_builtin_frame_address (fndecl, exp);
5864 /* Returns the address of the area where the structure is returned.
5866 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5867 if (call_expr_nargs (exp) != 0
5868 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5869 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5872 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5874 case BUILT_IN_ALLOCA:
5875 target = expand_builtin_alloca (exp, target);
5880 case BUILT_IN_STACK_SAVE:
5881 return expand_stack_save ();
5883 case BUILT_IN_STACK_RESTORE:
5884 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5887 case BUILT_IN_BSWAP32:
5888 case BUILT_IN_BSWAP64:
5889 target = expand_builtin_bswap (exp, target, subtarget);
5895 CASE_INT_FN (BUILT_IN_FFS):
5896 case BUILT_IN_FFSIMAX:
5897 target = expand_builtin_unop (target_mode, exp, target,
5898 subtarget, ffs_optab);
5903 CASE_INT_FN (BUILT_IN_CLZ):
5904 case BUILT_IN_CLZIMAX:
5905 target = expand_builtin_unop (target_mode, exp, target,
5906 subtarget, clz_optab);
5911 CASE_INT_FN (BUILT_IN_CTZ):
5912 case BUILT_IN_CTZIMAX:
5913 target = expand_builtin_unop (target_mode, exp, target,
5914 subtarget, ctz_optab);
5919 CASE_INT_FN (BUILT_IN_POPCOUNT):
5920 case BUILT_IN_POPCOUNTIMAX:
5921 target = expand_builtin_unop (target_mode, exp, target,
5922 subtarget, popcount_optab);
5927 CASE_INT_FN (BUILT_IN_PARITY):
5928 case BUILT_IN_PARITYIMAX:
5929 target = expand_builtin_unop (target_mode, exp, target,
5930 subtarget, parity_optab);
5935 case BUILT_IN_STRLEN:
5936 target = expand_builtin_strlen (exp, target, target_mode);
5941 case BUILT_IN_STRCPY:
5942 target = expand_builtin_strcpy (exp, target);
5947 case BUILT_IN_STRNCPY:
5948 target = expand_builtin_strncpy (exp, target);
5953 case BUILT_IN_STPCPY:
5954 target = expand_builtin_stpcpy (exp, target, mode);
5959 case BUILT_IN_MEMCPY:
5960 target = expand_builtin_memcpy (exp, target);
5965 case BUILT_IN_MEMPCPY:
5966 target = expand_builtin_mempcpy (exp, target, mode);
5971 case BUILT_IN_MEMSET:
5972 target = expand_builtin_memset (exp, target, mode);
5977 case BUILT_IN_BZERO:
5978 target = expand_builtin_bzero (exp);
5983 case BUILT_IN_STRCMP:
5984 target = expand_builtin_strcmp (exp, target);
5989 case BUILT_IN_STRNCMP:
5990 target = expand_builtin_strncmp (exp, target, mode);
5996 case BUILT_IN_MEMCMP:
5997 target = expand_builtin_memcmp (exp, target, mode);
6002 case BUILT_IN_SETJMP:
6003 /* This should have been lowered to the builtins below. */
6006 case BUILT_IN_SETJMP_SETUP:
6007 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6008 and the receiver label. */
6009 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6011 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6012 VOIDmode, EXPAND_NORMAL);
6013 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6014 rtx label_r = label_rtx (label);
6016 /* This is copied from the handling of non-local gotos. */
6017 expand_builtin_setjmp_setup (buf_addr, label_r);
6018 nonlocal_goto_handler_labels
6019 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6020 nonlocal_goto_handler_labels);
6021 /* ??? Do not let expand_label treat us as such since we would
6022 not want to be both on the list of non-local labels and on
6023 the list of forced labels. */
6024 FORCED_LABEL (label) = 0;
6029 case BUILT_IN_SETJMP_DISPATCHER:
6030 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6031 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6033 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6034 rtx label_r = label_rtx (label);
6036 /* Remove the dispatcher label from the list of non-local labels
6037 since the receiver labels have been added to it above. */
6038 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6043 case BUILT_IN_SETJMP_RECEIVER:
6044 /* __builtin_setjmp_receiver is passed the receiver label. */
6045 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6047 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6048 rtx label_r = label_rtx (label);
6050 expand_builtin_setjmp_receiver (label_r);
6055 /* __builtin_longjmp is passed a pointer to an array of five words.
6056 It's similar to the C library longjmp function but works with
6057 __builtin_setjmp above. */
6058 case BUILT_IN_LONGJMP:
6059 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6061 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6062 VOIDmode, EXPAND_NORMAL);
6063 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6065 if (value != const1_rtx)
6067 error ("%<__builtin_longjmp%> second argument must be 1");
6071 expand_builtin_longjmp (buf_addr, value);
6076 case BUILT_IN_NONLOCAL_GOTO:
6077 target = expand_builtin_nonlocal_goto (exp);
6082 /* This updates the setjmp buffer that is its argument with the value
6083 of the current stack pointer. */
6084 case BUILT_IN_UPDATE_SETJMP_BUF:
6085 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6088 = expand_normal (CALL_EXPR_ARG (exp, 0));
6090 expand_builtin_update_setjmp_buf (buf_addr);
6096 expand_builtin_trap ();
6099 case BUILT_IN_UNREACHABLE:
6100 expand_builtin_unreachable ();
6103 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6104 case BUILT_IN_SIGNBITD32:
6105 case BUILT_IN_SIGNBITD64:
6106 case BUILT_IN_SIGNBITD128:
6107 target = expand_builtin_signbit (exp, target);
6112 /* Various hooks for the DWARF 2 __throw routine. */
6113 case BUILT_IN_UNWIND_INIT:
6114 expand_builtin_unwind_init ();
6116 case BUILT_IN_DWARF_CFA:
6117 return virtual_cfa_rtx;
6118 #ifdef DWARF2_UNWIND_INFO
6119 case BUILT_IN_DWARF_SP_COLUMN:
6120 return expand_builtin_dwarf_sp_column ();
6121 case BUILT_IN_INIT_DWARF_REG_SIZES:
6122 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6125 case BUILT_IN_FROB_RETURN_ADDR:
6126 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6127 case BUILT_IN_EXTRACT_RETURN_ADDR:
6128 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6129 case BUILT_IN_EH_RETURN:
6130 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6131 CALL_EXPR_ARG (exp, 1));
6133 #ifdef EH_RETURN_DATA_REGNO
6134 case BUILT_IN_EH_RETURN_DATA_REGNO:
6135 return expand_builtin_eh_return_data_regno (exp);
6137 case BUILT_IN_EXTEND_POINTER:
6138 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6139 case BUILT_IN_EH_POINTER:
6140 return expand_builtin_eh_pointer (exp);
6141 case BUILT_IN_EH_FILTER:
6142 return expand_builtin_eh_filter (exp);
6143 case BUILT_IN_EH_COPY_VALUES:
6144 return expand_builtin_eh_copy_values (exp);
6146 case BUILT_IN_VA_START:
6147 return expand_builtin_va_start (exp);
6148 case BUILT_IN_VA_END:
6149 return expand_builtin_va_end (exp);
6150 case BUILT_IN_VA_COPY:
6151 return expand_builtin_va_copy (exp);
6152 case BUILT_IN_EXPECT:
6153 return expand_builtin_expect (exp, target);
6154 case BUILT_IN_PREFETCH:
6155 expand_builtin_prefetch (exp);
6158 case BUILT_IN_PROFILE_FUNC_ENTER:
6159 return expand_builtin_profile_func (false);
6160 case BUILT_IN_PROFILE_FUNC_EXIT:
6161 return expand_builtin_profile_func (true);
6163 case BUILT_IN_INIT_TRAMPOLINE:
6164 return expand_builtin_init_trampoline (exp);
6165 case BUILT_IN_ADJUST_TRAMPOLINE:
6166 return expand_builtin_adjust_trampoline (exp);
6169 case BUILT_IN_EXECL:
6170 case BUILT_IN_EXECV:
6171 case BUILT_IN_EXECLP:
6172 case BUILT_IN_EXECLE:
6173 case BUILT_IN_EXECVP:
6174 case BUILT_IN_EXECVE:
6175 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6180 case BUILT_IN_FETCH_AND_ADD_1:
6181 case BUILT_IN_FETCH_AND_ADD_2:
6182 case BUILT_IN_FETCH_AND_ADD_4:
6183 case BUILT_IN_FETCH_AND_ADD_8:
6184 case BUILT_IN_FETCH_AND_ADD_16:
6185 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6186 target = expand_builtin_sync_operation (mode, exp, PLUS,
6187 false, target, ignore);
6192 case BUILT_IN_FETCH_AND_SUB_1:
6193 case BUILT_IN_FETCH_AND_SUB_2:
6194 case BUILT_IN_FETCH_AND_SUB_4:
6195 case BUILT_IN_FETCH_AND_SUB_8:
6196 case BUILT_IN_FETCH_AND_SUB_16:
6197 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6198 target = expand_builtin_sync_operation (mode, exp, MINUS,
6199 false, target, ignore);
6204 case BUILT_IN_FETCH_AND_OR_1:
6205 case BUILT_IN_FETCH_AND_OR_2:
6206 case BUILT_IN_FETCH_AND_OR_4:
6207 case BUILT_IN_FETCH_AND_OR_8:
6208 case BUILT_IN_FETCH_AND_OR_16:
6209 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6210 target = expand_builtin_sync_operation (mode, exp, IOR,
6211 false, target, ignore);
6216 case BUILT_IN_FETCH_AND_AND_1:
6217 case BUILT_IN_FETCH_AND_AND_2:
6218 case BUILT_IN_FETCH_AND_AND_4:
6219 case BUILT_IN_FETCH_AND_AND_8:
6220 case BUILT_IN_FETCH_AND_AND_16:
6221 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6222 target = expand_builtin_sync_operation (mode, exp, AND,
6223 false, target, ignore);
6228 case BUILT_IN_FETCH_AND_XOR_1:
6229 case BUILT_IN_FETCH_AND_XOR_2:
6230 case BUILT_IN_FETCH_AND_XOR_4:
6231 case BUILT_IN_FETCH_AND_XOR_8:
6232 case BUILT_IN_FETCH_AND_XOR_16:
6233 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6234 target = expand_builtin_sync_operation (mode, exp, XOR,
6235 false, target, ignore);
6240 case BUILT_IN_FETCH_AND_NAND_1:
6241 case BUILT_IN_FETCH_AND_NAND_2:
6242 case BUILT_IN_FETCH_AND_NAND_4:
6243 case BUILT_IN_FETCH_AND_NAND_8:
6244 case BUILT_IN_FETCH_AND_NAND_16:
6245 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6246 target = expand_builtin_sync_operation (mode, exp, NOT,
6247 false, target, ignore);
6252 case BUILT_IN_ADD_AND_FETCH_1:
6253 case BUILT_IN_ADD_AND_FETCH_2:
6254 case BUILT_IN_ADD_AND_FETCH_4:
6255 case BUILT_IN_ADD_AND_FETCH_8:
6256 case BUILT_IN_ADD_AND_FETCH_16:
6257 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6258 target = expand_builtin_sync_operation (mode, exp, PLUS,
6259 true, target, ignore);
6264 case BUILT_IN_SUB_AND_FETCH_1:
6265 case BUILT_IN_SUB_AND_FETCH_2:
6266 case BUILT_IN_SUB_AND_FETCH_4:
6267 case BUILT_IN_SUB_AND_FETCH_8:
6268 case BUILT_IN_SUB_AND_FETCH_16:
6269 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6270 target = expand_builtin_sync_operation (mode, exp, MINUS,
6271 true, target, ignore);
6276 case BUILT_IN_OR_AND_FETCH_1:
6277 case BUILT_IN_OR_AND_FETCH_2:
6278 case BUILT_IN_OR_AND_FETCH_4:
6279 case BUILT_IN_OR_AND_FETCH_8:
6280 case BUILT_IN_OR_AND_FETCH_16:
6281 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6282 target = expand_builtin_sync_operation (mode, exp, IOR,
6283 true, target, ignore);
6288 case BUILT_IN_AND_AND_FETCH_1:
6289 case BUILT_IN_AND_AND_FETCH_2:
6290 case BUILT_IN_AND_AND_FETCH_4:
6291 case BUILT_IN_AND_AND_FETCH_8:
6292 case BUILT_IN_AND_AND_FETCH_16:
6293 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6294 target = expand_builtin_sync_operation (mode, exp, AND,
6295 true, target, ignore);
6300 case BUILT_IN_XOR_AND_FETCH_1:
6301 case BUILT_IN_XOR_AND_FETCH_2:
6302 case BUILT_IN_XOR_AND_FETCH_4:
6303 case BUILT_IN_XOR_AND_FETCH_8:
6304 case BUILT_IN_XOR_AND_FETCH_16:
6305 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6306 target = expand_builtin_sync_operation (mode, exp, XOR,
6307 true, target, ignore);
6312 case BUILT_IN_NAND_AND_FETCH_1:
6313 case BUILT_IN_NAND_AND_FETCH_2:
6314 case BUILT_IN_NAND_AND_FETCH_4:
6315 case BUILT_IN_NAND_AND_FETCH_8:
6316 case BUILT_IN_NAND_AND_FETCH_16:
6317 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6318 target = expand_builtin_sync_operation (mode, exp, NOT,
6319 true, target, ignore);
6324 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6325 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6326 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6327 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6328 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6329 if (mode == VOIDmode)
6330 mode = TYPE_MODE (boolean_type_node);
6331 if (!target || !register_operand (target, mode))
6332 target = gen_reg_rtx (mode);
6334 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6335 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6340 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6341 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6342 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6343 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6344 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6345 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6346 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6351 case BUILT_IN_LOCK_TEST_AND_SET_1:
6352 case BUILT_IN_LOCK_TEST_AND_SET_2:
6353 case BUILT_IN_LOCK_TEST_AND_SET_4:
6354 case BUILT_IN_LOCK_TEST_AND_SET_8:
6355 case BUILT_IN_LOCK_TEST_AND_SET_16:
6356 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6357 target = expand_builtin_lock_test_and_set (mode, exp, target);
6362 case BUILT_IN_LOCK_RELEASE_1:
6363 case BUILT_IN_LOCK_RELEASE_2:
6364 case BUILT_IN_LOCK_RELEASE_4:
6365 case BUILT_IN_LOCK_RELEASE_8:
6366 case BUILT_IN_LOCK_RELEASE_16:
6367 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6368 expand_builtin_lock_release (mode, exp);
6371 case BUILT_IN_SYNCHRONIZE:
6372 expand_builtin_synchronize ();
6375 case BUILT_IN_OBJECT_SIZE:
6376 return expand_builtin_object_size (exp);
6378 case BUILT_IN_MEMCPY_CHK:
6379 case BUILT_IN_MEMPCPY_CHK:
6380 case BUILT_IN_MEMMOVE_CHK:
6381 case BUILT_IN_MEMSET_CHK:
6382 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6387 case BUILT_IN_STRCPY_CHK:
6388 case BUILT_IN_STPCPY_CHK:
6389 case BUILT_IN_STRNCPY_CHK:
6390 case BUILT_IN_STRCAT_CHK:
6391 case BUILT_IN_STRNCAT_CHK:
6392 case BUILT_IN_SNPRINTF_CHK:
6393 case BUILT_IN_VSNPRINTF_CHK:
6394 maybe_emit_chk_warning (exp, fcode);
6397 case BUILT_IN_SPRINTF_CHK:
6398 case BUILT_IN_VSPRINTF_CHK:
6399 maybe_emit_sprintf_chk_warning (exp, fcode);
6403 maybe_emit_free_warning (exp);
6406 default: /* just do library call, if unknown builtin */
6410 /* The switch statement above can drop through to cause the function
6411 to be called normally. */
6412 return expand_call (exp, target, ignore);
6415 /* Determine whether a tree node represents a call to a built-in
6416 function. If the tree T is a call to a built-in function with
6417 the right number of arguments of the appropriate types, return
6418 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6419 Otherwise the return value is END_BUILTINS. */
6421 enum built_in_function
6422 builtin_mathfn_code (const_tree t)
6424 const_tree fndecl, arg, parmlist;
6425 const_tree argtype, parmtype;
6426 const_call_expr_arg_iterator iter;
6428 if (TREE_CODE (t) != CALL_EXPR
6429 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6430 return END_BUILTINS;
6432 fndecl = get_callee_fndecl (t);
6433 if (fndecl == NULL_TREE
6434 || TREE_CODE (fndecl) != FUNCTION_DECL
6435 || ! DECL_BUILT_IN (fndecl)
6436 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6437 return END_BUILTINS;
6439 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6440 init_const_call_expr_arg_iterator (t, &iter);
6441 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6443 /* If a function doesn't take a variable number of arguments,
6444 the last element in the list will have type `void'. */
6445 parmtype = TREE_VALUE (parmlist);
6446 if (VOID_TYPE_P (parmtype))
6448 if (more_const_call_expr_args_p (&iter))
6449 return END_BUILTINS;
6450 return DECL_FUNCTION_CODE (fndecl);
6453 if (! more_const_call_expr_args_p (&iter))
6454 return END_BUILTINS;
6456 arg = next_const_call_expr_arg (&iter);
6457 argtype = TREE_TYPE (arg);
6459 if (SCALAR_FLOAT_TYPE_P (parmtype))
6461 if (! SCALAR_FLOAT_TYPE_P (argtype))
6462 return END_BUILTINS;
6464 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6466 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6467 return END_BUILTINS;
6469 else if (POINTER_TYPE_P (parmtype))
6471 if (! POINTER_TYPE_P (argtype))
6472 return END_BUILTINS;
6474 else if (INTEGRAL_TYPE_P (parmtype))
6476 if (! INTEGRAL_TYPE_P (argtype))
6477 return END_BUILTINS;
6480 return END_BUILTINS;
6483 /* Variable-length argument list. */
6484 return DECL_FUNCTION_CODE (fndecl);
6487 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6488 evaluate to a constant. */
6491 fold_builtin_constant_p (tree arg)
6493 /* We return 1 for a numeric type that's known to be a constant
6494 value at compile-time or for an aggregate type that's a
6495 literal constant. */
6498 /* If we know this is a constant, emit the constant of one. */
6499 if (CONSTANT_CLASS_P (arg)
6500 || (TREE_CODE (arg) == CONSTRUCTOR
6501 && TREE_CONSTANT (arg)))
6502 return integer_one_node;
6503 if (TREE_CODE (arg) == ADDR_EXPR)
6505 tree op = TREE_OPERAND (arg, 0);
6506 if (TREE_CODE (op) == STRING_CST
6507 || (TREE_CODE (op) == ARRAY_REF
6508 && integer_zerop (TREE_OPERAND (op, 1))
6509 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6510 return integer_one_node;
6513 /* If this expression has side effects, show we don't know it to be a
6514 constant. Likewise if it's a pointer or aggregate type since in
6515 those case we only want literals, since those are only optimized
6516 when generating RTL, not later.
6517 And finally, if we are compiling an initializer, not code, we
6518 need to return a definite result now; there's not going to be any
6519 more optimization done. */
6520 if (TREE_SIDE_EFFECTS (arg)
6521 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6522 || POINTER_TYPE_P (TREE_TYPE (arg))
6524 || folding_initializer)
6525 return integer_zero_node;
6530 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6531 return it as a truthvalue. */
6534 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6536 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6538 fn = built_in_decls[BUILT_IN_EXPECT];
6539 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6540 ret_type = TREE_TYPE (TREE_TYPE (fn));
6541 pred_type = TREE_VALUE (arg_types);
6542 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6544 pred = fold_convert_loc (loc, pred_type, pred);
6545 expected = fold_convert_loc (loc, expected_type, expected);
6546 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6548 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6549 build_int_cst (ret_type, 0));
6552 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6553 NULL_TREE if no simplification is possible. */
6556 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6559 enum tree_code code;
6561 /* If this is a builtin_expect within a builtin_expect keep the
6562 inner one. See through a comparison against a constant. It
6563 might have been added to create a thruthvalue. */
6565 if (COMPARISON_CLASS_P (inner)
6566 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6567 inner = TREE_OPERAND (inner, 0);
6569 if (TREE_CODE (inner) == CALL_EXPR
6570 && (fndecl = get_callee_fndecl (inner))
6571 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6572 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6575 /* Distribute the expected value over short-circuiting operators.
6576 See through the cast from truthvalue_type_node to long. */
6578 while (TREE_CODE (inner) == NOP_EXPR
6579 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6580 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6581 inner = TREE_OPERAND (inner, 0);
6583 code = TREE_CODE (inner);
6584 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6586 tree op0 = TREE_OPERAND (inner, 0);
6587 tree op1 = TREE_OPERAND (inner, 1);
6589 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6590 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6591 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6593 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6596 /* If the argument isn't invariant then there's nothing else we can do. */
6597 if (!TREE_CONSTANT (arg0))
6600 /* If we expect that a comparison against the argument will fold to
6601 a constant return the constant. In practice, this means a true
6602 constant or the address of a non-weak symbol. */
6605 if (TREE_CODE (inner) == ADDR_EXPR)
6609 inner = TREE_OPERAND (inner, 0);
6611 while (TREE_CODE (inner) == COMPONENT_REF
6612 || TREE_CODE (inner) == ARRAY_REF);
6613 if ((TREE_CODE (inner) == VAR_DECL
6614 || TREE_CODE (inner) == FUNCTION_DECL)
6615 && DECL_WEAK (inner))
6619 /* Otherwise, ARG0 already has the proper type for the return value. */
6623 /* Fold a call to __builtin_classify_type with argument ARG. */
6626 fold_builtin_classify_type (tree arg)
6629 return build_int_cst (NULL_TREE, no_type_class);
6631 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6634 /* Fold a call to __builtin_strlen with argument ARG. */
6637 fold_builtin_strlen (location_t loc, tree type, tree arg)
6639 if (!validate_arg (arg, POINTER_TYPE))
6643 tree len = c_strlen (arg, 0);
6646 return fold_convert_loc (loc, type, len);
6652 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6655 fold_builtin_inf (location_t loc, tree type, int warn)
6657 REAL_VALUE_TYPE real;
6659 /* __builtin_inff is intended to be usable to define INFINITY on all
6660 targets. If an infinity is not available, INFINITY expands "to a
6661 positive constant of type float that overflows at translation
6662 time", footnote "In this case, using INFINITY will violate the
6663 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6664 Thus we pedwarn to ensure this constraint violation is
6666 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6667 pedwarn (loc, 0, "target format does not support infinity");
6670 return build_real (type, real);
6673 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6676 fold_builtin_nan (tree arg, tree type, int quiet)
6678 REAL_VALUE_TYPE real;
6681 if (!validate_arg (arg, POINTER_TYPE))
6683 str = c_getstr (arg);
6687 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6690 return build_real (type, real);
6693 /* Return true if the floating point expression T has an integer value.
6694 We also allow +Inf, -Inf and NaN to be considered integer values. */
6697 integer_valued_real_p (tree t)
6699 switch (TREE_CODE (t))
6706 return integer_valued_real_p (TREE_OPERAND (t, 0));
6711 return integer_valued_real_p (TREE_OPERAND (t, 1));
6718 return integer_valued_real_p (TREE_OPERAND (t, 0))
6719 && integer_valued_real_p (TREE_OPERAND (t, 1));
6722 return integer_valued_real_p (TREE_OPERAND (t, 1))
6723 && integer_valued_real_p (TREE_OPERAND (t, 2));
6726 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6730 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6731 if (TREE_CODE (type) == INTEGER_TYPE)
6733 if (TREE_CODE (type) == REAL_TYPE)
6734 return integer_valued_real_p (TREE_OPERAND (t, 0));
6739 switch (builtin_mathfn_code (t))
6741 CASE_FLT_FN (BUILT_IN_CEIL):
6742 CASE_FLT_FN (BUILT_IN_FLOOR):
6743 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6744 CASE_FLT_FN (BUILT_IN_RINT):
6745 CASE_FLT_FN (BUILT_IN_ROUND):
6746 CASE_FLT_FN (BUILT_IN_TRUNC):
6749 CASE_FLT_FN (BUILT_IN_FMIN):
6750 CASE_FLT_FN (BUILT_IN_FMAX):
6751 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6752 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6765 /* FNDECL is assumed to be a builtin where truncation can be propagated
6766 across (for instance floor((double)f) == (double)floorf (f).
6767 Do the transformation for a call with argument ARG. */
6770 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6772 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6774 if (!validate_arg (arg, REAL_TYPE))
6777 /* Integer rounding functions are idempotent. */
6778 if (fcode == builtin_mathfn_code (arg))
6781 /* If argument is already integer valued, and we don't need to worry
6782 about setting errno, there's no need to perform rounding. */
6783 if (! flag_errno_math && integer_valued_real_p (arg))
6788 tree arg0 = strip_float_extensions (arg);
6789 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6790 tree newtype = TREE_TYPE (arg0);
6793 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6794 && (decl = mathfn_built_in (newtype, fcode)))
6795 return fold_convert_loc (loc, ftype,
6796 build_call_expr_loc (loc, decl, 1,
6797 fold_convert_loc (loc,
6804 /* FNDECL is assumed to be builtin which can narrow the FP type of
6805 the argument, for instance lround((double)f) -> lroundf (f).
6806 Do the transformation for a call with argument ARG. */
6809 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6811 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6813 if (!validate_arg (arg, REAL_TYPE))
6816 /* If argument is already integer valued, and we don't need to worry
6817 about setting errno, there's no need to perform rounding. */
6818 if (! flag_errno_math && integer_valued_real_p (arg))
6819 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6820 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6824 tree ftype = TREE_TYPE (arg);
6825 tree arg0 = strip_float_extensions (arg);
6826 tree newtype = TREE_TYPE (arg0);
6829 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6830 && (decl = mathfn_built_in (newtype, fcode)))
6831 return build_call_expr_loc (loc, decl, 1,
6832 fold_convert_loc (loc, newtype, arg0));
6835 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6836 sizeof (long long) == sizeof (long). */
6837 if (TYPE_PRECISION (long_long_integer_type_node)
6838 == TYPE_PRECISION (long_integer_type_node))
6840 tree newfn = NULL_TREE;
6843 CASE_FLT_FN (BUILT_IN_LLCEIL):
6844 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6847 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6848 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6851 CASE_FLT_FN (BUILT_IN_LLROUND):
6852 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6855 CASE_FLT_FN (BUILT_IN_LLRINT):
6856 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6865 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6866 return fold_convert_loc (loc,
6867 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6874 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6875 return type. Return NULL_TREE if no simplification can be made. */
6878 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6882 if (!validate_arg (arg, COMPLEX_TYPE)
6883 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6886 /* Calculate the result when the argument is a constant. */
6887 if (TREE_CODE (arg) == COMPLEX_CST
6888 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6892 if (TREE_CODE (arg) == COMPLEX_EXPR)
6894 tree real = TREE_OPERAND (arg, 0);
6895 tree imag = TREE_OPERAND (arg, 1);
6897 /* If either part is zero, cabs is fabs of the other. */
6898 if (real_zerop (real))
6899 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6900 if (real_zerop (imag))
6901 return fold_build1_loc (loc, ABS_EXPR, type, real);
6903 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6904 if (flag_unsafe_math_optimizations
6905 && operand_equal_p (real, imag, OEP_PURE_SAME))
6907 const REAL_VALUE_TYPE sqrt2_trunc
6908 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6910 return fold_build2_loc (loc, MULT_EXPR, type,
6911 fold_build1_loc (loc, ABS_EXPR, type, real),
6912 build_real (type, sqrt2_trunc));
6916 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
6917 if (TREE_CODE (arg) == NEGATE_EXPR
6918 || TREE_CODE (arg) == CONJ_EXPR)
6919 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6921 /* Don't do this when optimizing for size. */
6922 if (flag_unsafe_math_optimizations
6923 && optimize && optimize_function_for_speed_p (cfun))
6925 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6927 if (sqrtfn != NULL_TREE)
6929 tree rpart, ipart, result;
6931 arg = builtin_save_expr (arg);
6933 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6934 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6936 rpart = builtin_save_expr (rpart);
6937 ipart = builtin_save_expr (ipart);
6939 result = fold_build2_loc (loc, PLUS_EXPR, type,
6940 fold_build2_loc (loc, MULT_EXPR, type,
6942 fold_build2_loc (loc, MULT_EXPR, type,
6945 return build_call_expr_loc (loc, sqrtfn, 1, result);
6952 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
6953 Return NULL_TREE if no simplification can be made. */
6956 fold_builtin_sqrt (location_t loc, tree arg, tree type)
6959 enum built_in_function fcode;
6962 if (!validate_arg (arg, REAL_TYPE))
6965 /* Calculate the result when the argument is a constant. */
6966 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
6969 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
6970 fcode = builtin_mathfn_code (arg);
6971 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6973 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6974 arg = fold_build2_loc (loc, MULT_EXPR, type,
6975 CALL_EXPR_ARG (arg, 0),
6976 build_real (type, dconsthalf));
6977 return build_call_expr_loc (loc, expfn, 1, arg);
6980 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
6981 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6983 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6987 tree arg0 = CALL_EXPR_ARG (arg, 0);
6989 /* The inner root was either sqrt or cbrt. */
6990 /* This was a conditional expression but it triggered a bug
6992 REAL_VALUE_TYPE dconstroot;
6993 if (BUILTIN_SQRT_P (fcode))
6994 dconstroot = dconsthalf;
6996 dconstroot = dconst_third ();
6998 /* Adjust for the outer root. */
6999 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7000 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7001 tree_root = build_real (type, dconstroot);
7002 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7006 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7007 if (flag_unsafe_math_optimizations
7008 && (fcode == BUILT_IN_POW
7009 || fcode == BUILT_IN_POWF
7010 || fcode == BUILT_IN_POWL))
7012 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7013 tree arg0 = CALL_EXPR_ARG (arg, 0);
7014 tree arg1 = CALL_EXPR_ARG (arg, 1);
7016 if (!tree_expr_nonnegative_p (arg0))
7017 arg0 = build1 (ABS_EXPR, type, arg0);
7018 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7019 build_real (type, dconsthalf));
7020 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7026 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7027 Return NULL_TREE if no simplification can be made. */
7030 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7032 const enum built_in_function fcode = builtin_mathfn_code (arg);
7035 if (!validate_arg (arg, REAL_TYPE))
7038 /* Calculate the result when the argument is a constant. */
7039 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7042 if (flag_unsafe_math_optimizations)
7044 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7045 if (BUILTIN_EXPONENT_P (fcode))
7047 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7048 const REAL_VALUE_TYPE third_trunc =
7049 real_value_truncate (TYPE_MODE (type), dconst_third ());
7050 arg = fold_build2_loc (loc, MULT_EXPR, type,
7051 CALL_EXPR_ARG (arg, 0),
7052 build_real (type, third_trunc));
7053 return build_call_expr_loc (loc, expfn, 1, arg);
7056 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7057 if (BUILTIN_SQRT_P (fcode))
7059 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7063 tree arg0 = CALL_EXPR_ARG (arg, 0);
7065 REAL_VALUE_TYPE dconstroot = dconst_third ();
7067 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7068 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7069 tree_root = build_real (type, dconstroot);
7070 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7074 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7075 if (BUILTIN_CBRT_P (fcode))
7077 tree arg0 = CALL_EXPR_ARG (arg, 0);
7078 if (tree_expr_nonnegative_p (arg0))
7080 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7085 REAL_VALUE_TYPE dconstroot;
7087 real_arithmetic (&dconstroot, MULT_EXPR,
7088 dconst_third_ptr (), dconst_third_ptr ());
7089 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7090 tree_root = build_real (type, dconstroot);
7091 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7096 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7097 if (fcode == BUILT_IN_POW
7098 || fcode == BUILT_IN_POWF
7099 || fcode == BUILT_IN_POWL)
7101 tree arg00 = CALL_EXPR_ARG (arg, 0);
7102 tree arg01 = CALL_EXPR_ARG (arg, 1);
7103 if (tree_expr_nonnegative_p (arg00))
7105 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7106 const REAL_VALUE_TYPE dconstroot
7107 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7108 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7109 build_real (type, dconstroot));
7110 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7117 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7118 TYPE is the type of the return value. Return NULL_TREE if no
7119 simplification can be made. */
7122 fold_builtin_cos (location_t loc,
7123 tree arg, tree type, tree fndecl)
7127 if (!validate_arg (arg, REAL_TYPE))
7130 /* Calculate the result when the argument is a constant. */
7131 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7134 /* Optimize cos(-x) into cos (x). */
7135 if ((narg = fold_strip_sign_ops (arg)))
7136 return build_call_expr_loc (loc, fndecl, 1, narg);
7141 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7142 Return NULL_TREE if no simplification can be made. */
7145 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7147 if (validate_arg (arg, REAL_TYPE))
7151 /* Calculate the result when the argument is a constant. */
7152 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7155 /* Optimize cosh(-x) into cosh (x). */
7156 if ((narg = fold_strip_sign_ops (arg)))
7157 return build_call_expr_loc (loc, fndecl, 1, narg);
7163 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7164 argument ARG. TYPE is the type of the return value. Return
7165 NULL_TREE if no simplification can be made. */
7168 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7171 if (validate_arg (arg, COMPLEX_TYPE)
7172 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7176 /* Calculate the result when the argument is a constant. */
7177 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7180 /* Optimize fn(-x) into fn(x). */
7181 if ((tmp = fold_strip_sign_ops (arg)))
7182 return build_call_expr_loc (loc, fndecl, 1, tmp);
7188 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7189 Return NULL_TREE if no simplification can be made. */
7192 fold_builtin_tan (tree arg, tree type)
7194 enum built_in_function fcode;
7197 if (!validate_arg (arg, REAL_TYPE))
7200 /* Calculate the result when the argument is a constant. */
7201 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7204 /* Optimize tan(atan(x)) = x. */
7205 fcode = builtin_mathfn_code (arg);
7206 if (flag_unsafe_math_optimizations
7207 && (fcode == BUILT_IN_ATAN
7208 || fcode == BUILT_IN_ATANF
7209 || fcode == BUILT_IN_ATANL))
7210 return CALL_EXPR_ARG (arg, 0);
7215 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7216 NULL_TREE if no simplification can be made. */
7219 fold_builtin_sincos (location_t loc,
7220 tree arg0, tree arg1, tree arg2)
7225 if (!validate_arg (arg0, REAL_TYPE)
7226 || !validate_arg (arg1, POINTER_TYPE)
7227 || !validate_arg (arg2, POINTER_TYPE))
7230 type = TREE_TYPE (arg0);
7232 /* Calculate the result when the argument is a constant. */
7233 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7236 /* Canonicalize sincos to cexpi. */
7237 if (!TARGET_C99_FUNCTIONS)
7239 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7243 call = build_call_expr_loc (loc, fn, 1, arg0);
7244 call = builtin_save_expr (call);
7246 return build2 (COMPOUND_EXPR, void_type_node,
7247 build2 (MODIFY_EXPR, void_type_node,
7248 build_fold_indirect_ref_loc (loc, arg1),
7249 build1 (IMAGPART_EXPR, type, call)),
7250 build2 (MODIFY_EXPR, void_type_node,
7251 build_fold_indirect_ref_loc (loc, arg2),
7252 build1 (REALPART_EXPR, type, call)));
7255 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7256 NULL_TREE if no simplification can be made. */
7259 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7262 tree realp, imagp, ifn;
7265 if (!validate_arg (arg0, COMPLEX_TYPE)
7266 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7269 /* Calculate the result when the argument is a constant. */
7270 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7273 rtype = TREE_TYPE (TREE_TYPE (arg0));
7275 /* In case we can figure out the real part of arg0 and it is constant zero
7277 if (!TARGET_C99_FUNCTIONS)
7279 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7283 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7284 && real_zerop (realp))
7286 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7287 return build_call_expr_loc (loc, ifn, 1, narg);
7290 /* In case we can easily decompose real and imaginary parts split cexp
7291 to exp (r) * cexpi (i). */
7292 if (flag_unsafe_math_optimizations
7295 tree rfn, rcall, icall;
7297 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7301 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7305 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7306 icall = builtin_save_expr (icall);
7307 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7308 rcall = builtin_save_expr (rcall);
7309 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7310 fold_build2_loc (loc, MULT_EXPR, rtype,
7312 fold_build1_loc (loc, REALPART_EXPR,
7314 fold_build2_loc (loc, MULT_EXPR, rtype,
7316 fold_build1_loc (loc, IMAGPART_EXPR,
7323 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7324 Return NULL_TREE if no simplification can be made. */
7327 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7329 if (!validate_arg (arg, REAL_TYPE))
7332 /* Optimize trunc of constant value. */
7333 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7335 REAL_VALUE_TYPE r, x;
7336 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7338 x = TREE_REAL_CST (arg);
7339 real_trunc (&r, TYPE_MODE (type), &x);
7340 return build_real (type, r);
7343 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7346 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7347 Return NULL_TREE if no simplification can be made. */
7350 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7352 if (!validate_arg (arg, REAL_TYPE))
7355 /* Optimize floor of constant value. */
7356 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7360 x = TREE_REAL_CST (arg);
7361 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7363 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7366 real_floor (&r, TYPE_MODE (type), &x);
7367 return build_real (type, r);
7371 /* Fold floor (x) where x is nonnegative to trunc (x). */
7372 if (tree_expr_nonnegative_p (arg))
7374 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7376 return build_call_expr_loc (loc, truncfn, 1, arg);
7379 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7382 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7383 Return NULL_TREE if no simplification can be made. */
7386 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7388 if (!validate_arg (arg, REAL_TYPE))
7391 /* Optimize ceil of constant value. */
7392 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7396 x = TREE_REAL_CST (arg);
7397 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7399 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7402 real_ceil (&r, TYPE_MODE (type), &x);
7403 return build_real (type, r);
7407 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7410 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7411 Return NULL_TREE if no simplification can be made. */
7414 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7416 if (!validate_arg (arg, REAL_TYPE))
7419 /* Optimize round of constant value. */
7420 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7424 x = TREE_REAL_CST (arg);
7425 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7427 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7430 real_round (&r, TYPE_MODE (type), &x);
7431 return build_real (type, r);
7435 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7438 /* Fold function call to builtin lround, lroundf or lroundl (or the
7439 corresponding long long versions) and other rounding functions. ARG
7440 is the argument to the call. Return NULL_TREE if no simplification
7444 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7446 if (!validate_arg (arg, REAL_TYPE))
7449 /* Optimize lround of constant value. */
7450 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7452 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7454 if (real_isfinite (&x))
7456 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7457 tree ftype = TREE_TYPE (arg);
7458 unsigned HOST_WIDE_INT lo2;
7459 HOST_WIDE_INT hi, lo;
7462 switch (DECL_FUNCTION_CODE (fndecl))
7464 CASE_FLT_FN (BUILT_IN_LFLOOR):
7465 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7466 real_floor (&r, TYPE_MODE (ftype), &x);
7469 CASE_FLT_FN (BUILT_IN_LCEIL):
7470 CASE_FLT_FN (BUILT_IN_LLCEIL):
7471 real_ceil (&r, TYPE_MODE (ftype), &x);
7474 CASE_FLT_FN (BUILT_IN_LROUND):
7475 CASE_FLT_FN (BUILT_IN_LLROUND):
7476 real_round (&r, TYPE_MODE (ftype), &x);
7483 REAL_VALUE_TO_INT (&lo, &hi, r);
7484 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7485 return build_int_cst_wide (itype, lo2, hi);
7489 switch (DECL_FUNCTION_CODE (fndecl))
7491 CASE_FLT_FN (BUILT_IN_LFLOOR):
7492 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7493 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7494 if (tree_expr_nonnegative_p (arg))
7495 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7496 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7501 return fold_fixed_mathfn (loc, fndecl, arg);
7504 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7505 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7506 the argument to the call. Return NULL_TREE if no simplification can
7510 fold_builtin_bitop (tree fndecl, tree arg)
7512 if (!validate_arg (arg, INTEGER_TYPE))
7515 /* Optimize for constant argument. */
7516 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7518 HOST_WIDE_INT hi, width, result;
7519 unsigned HOST_WIDE_INT lo;
7522 type = TREE_TYPE (arg);
7523 width = TYPE_PRECISION (type);
7524 lo = TREE_INT_CST_LOW (arg);
7526 /* Clear all the bits that are beyond the type's precision. */
7527 if (width > HOST_BITS_PER_WIDE_INT)
7529 hi = TREE_INT_CST_HIGH (arg);
7530 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7531 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7536 if (width < HOST_BITS_PER_WIDE_INT)
7537 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7540 switch (DECL_FUNCTION_CODE (fndecl))
7542 CASE_INT_FN (BUILT_IN_FFS):
7544 result = exact_log2 (lo & -lo) + 1;
7546 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7551 CASE_INT_FN (BUILT_IN_CLZ):
7553 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7555 result = width - floor_log2 (lo) - 1;
7556 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7560 CASE_INT_FN (BUILT_IN_CTZ):
7562 result = exact_log2 (lo & -lo);
7564 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7565 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7569 CASE_INT_FN (BUILT_IN_POPCOUNT):
7572 result++, lo &= lo - 1;
7574 result++, hi &= hi - 1;
7577 CASE_INT_FN (BUILT_IN_PARITY):
7580 result++, lo &= lo - 1;
7582 result++, hi &= hi - 1;
7590 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7596 /* Fold function call to builtin_bswap and the long and long long
7597 variants. Return NULL_TREE if no simplification can be made. */
7599 fold_builtin_bswap (tree fndecl, tree arg)
7601 if (! validate_arg (arg, INTEGER_TYPE))
7604 /* Optimize constant value. */
7605 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7607 HOST_WIDE_INT hi, width, r_hi = 0;
7608 unsigned HOST_WIDE_INT lo, r_lo = 0;
7611 type = TREE_TYPE (arg);
7612 width = TYPE_PRECISION (type);
7613 lo = TREE_INT_CST_LOW (arg);
7614 hi = TREE_INT_CST_HIGH (arg);
7616 switch (DECL_FUNCTION_CODE (fndecl))
7618 case BUILT_IN_BSWAP32:
7619 case BUILT_IN_BSWAP64:
7623 for (s = 0; s < width; s += 8)
7625 int d = width - s - 8;
7626 unsigned HOST_WIDE_INT byte;
7628 if (s < HOST_BITS_PER_WIDE_INT)
7629 byte = (lo >> s) & 0xff;
7631 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7633 if (d < HOST_BITS_PER_WIDE_INT)
7636 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7646 if (width < HOST_BITS_PER_WIDE_INT)
7647 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7649 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7655 /* A subroutine of fold_builtin to fold the various logarithmic
7656 functions. Return NULL_TREE if no simplification can me made.
7657 FUNC is the corresponding MPFR logarithm function. */
7660 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7661 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7663 if (validate_arg (arg, REAL_TYPE))
7665 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7667 const enum built_in_function fcode = builtin_mathfn_code (arg);
7669 /* Calculate the result when the argument is a constant. */
7670 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7673 /* Special case, optimize logN(expN(x)) = x. */
7674 if (flag_unsafe_math_optimizations
7675 && ((func == mpfr_log
7676 && (fcode == BUILT_IN_EXP
7677 || fcode == BUILT_IN_EXPF
7678 || fcode == BUILT_IN_EXPL))
7679 || (func == mpfr_log2
7680 && (fcode == BUILT_IN_EXP2
7681 || fcode == BUILT_IN_EXP2F
7682 || fcode == BUILT_IN_EXP2L))
7683 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7684 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7686 /* Optimize logN(func()) for various exponential functions. We
7687 want to determine the value "x" and the power "exponent" in
7688 order to transform logN(x**exponent) into exponent*logN(x). */
7689 if (flag_unsafe_math_optimizations)
7691 tree exponent = 0, x = 0;
7695 CASE_FLT_FN (BUILT_IN_EXP):
7696 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7697 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7699 exponent = CALL_EXPR_ARG (arg, 0);
7701 CASE_FLT_FN (BUILT_IN_EXP2):
7702 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7703 x = build_real (type, dconst2);
7704 exponent = CALL_EXPR_ARG (arg, 0);
7706 CASE_FLT_FN (BUILT_IN_EXP10):
7707 CASE_FLT_FN (BUILT_IN_POW10):
7708 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7710 REAL_VALUE_TYPE dconst10;
7711 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7712 x = build_real (type, dconst10);
7714 exponent = CALL_EXPR_ARG (arg, 0);
7716 CASE_FLT_FN (BUILT_IN_SQRT):
7717 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7718 x = CALL_EXPR_ARG (arg, 0);
7719 exponent = build_real (type, dconsthalf);
7721 CASE_FLT_FN (BUILT_IN_CBRT):
7722 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7723 x = CALL_EXPR_ARG (arg, 0);
7724 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7727 CASE_FLT_FN (BUILT_IN_POW):
7728 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7729 x = CALL_EXPR_ARG (arg, 0);
7730 exponent = CALL_EXPR_ARG (arg, 1);
7736 /* Now perform the optimization. */
7739 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7740 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7748 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7749 NULL_TREE if no simplification can be made. */
7752 fold_builtin_hypot (location_t loc, tree fndecl,
7753 tree arg0, tree arg1, tree type)
7755 tree res, narg0, narg1;
7757 if (!validate_arg (arg0, REAL_TYPE)
7758 || !validate_arg (arg1, REAL_TYPE))
7761 /* Calculate the result when the argument is a constant. */
7762 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7765 /* If either argument to hypot has a negate or abs, strip that off.
7766 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7767 narg0 = fold_strip_sign_ops (arg0);
7768 narg1 = fold_strip_sign_ops (arg1);
7771 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7772 narg1 ? narg1 : arg1);
7775 /* If either argument is zero, hypot is fabs of the other. */
7776 if (real_zerop (arg0))
7777 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7778 else if (real_zerop (arg1))
7779 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7781 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7782 if (flag_unsafe_math_optimizations
7783 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7785 const REAL_VALUE_TYPE sqrt2_trunc
7786 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7787 return fold_build2_loc (loc, MULT_EXPR, type,
7788 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7789 build_real (type, sqrt2_trunc));
7796 /* Fold a builtin function call to pow, powf, or powl. Return
7797 NULL_TREE if no simplification can be made. */
7799 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7803 if (!validate_arg (arg0, REAL_TYPE)
7804 || !validate_arg (arg1, REAL_TYPE))
7807 /* Calculate the result when the argument is a constant. */
7808 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7811 /* Optimize pow(1.0,y) = 1.0. */
7812 if (real_onep (arg0))
7813 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7815 if (TREE_CODE (arg1) == REAL_CST
7816 && !TREE_OVERFLOW (arg1))
7818 REAL_VALUE_TYPE cint;
7822 c = TREE_REAL_CST (arg1);
7824 /* Optimize pow(x,0.0) = 1.0. */
7825 if (REAL_VALUES_EQUAL (c, dconst0))
7826 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7829 /* Optimize pow(x,1.0) = x. */
7830 if (REAL_VALUES_EQUAL (c, dconst1))
7833 /* Optimize pow(x,-1.0) = 1.0/x. */
7834 if (REAL_VALUES_EQUAL (c, dconstm1))
7835 return fold_build2_loc (loc, RDIV_EXPR, type,
7836 build_real (type, dconst1), arg0);
7838 /* Optimize pow(x,0.5) = sqrt(x). */
7839 if (flag_unsafe_math_optimizations
7840 && REAL_VALUES_EQUAL (c, dconsthalf))
7842 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7844 if (sqrtfn != NULL_TREE)
7845 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7848 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7849 if (flag_unsafe_math_optimizations)
7851 const REAL_VALUE_TYPE dconstroot
7852 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7854 if (REAL_VALUES_EQUAL (c, dconstroot))
7856 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7857 if (cbrtfn != NULL_TREE)
7858 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7862 /* Check for an integer exponent. */
7863 n = real_to_integer (&c);
7864 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7865 if (real_identical (&c, &cint))
7867 /* Attempt to evaluate pow at compile-time, unless this should
7868 raise an exception. */
7869 if (TREE_CODE (arg0) == REAL_CST
7870 && !TREE_OVERFLOW (arg0)
7872 || (!flag_trapping_math && !flag_errno_math)
7873 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7878 x = TREE_REAL_CST (arg0);
7879 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7880 if (flag_unsafe_math_optimizations || !inexact)
7881 return build_real (type, x);
7884 /* Strip sign ops from even integer powers. */
7885 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7887 tree narg0 = fold_strip_sign_ops (arg0);
7889 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
7894 if (flag_unsafe_math_optimizations)
7896 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7898 /* Optimize pow(expN(x),y) = expN(x*y). */
7899 if (BUILTIN_EXPONENT_P (fcode))
7901 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7902 tree arg = CALL_EXPR_ARG (arg0, 0);
7903 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7904 return build_call_expr_loc (loc, expfn, 1, arg);
7907 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7908 if (BUILTIN_SQRT_P (fcode))
7910 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7911 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7912 build_real (type, dconsthalf));
7913 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7916 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7917 if (BUILTIN_CBRT_P (fcode))
7919 tree arg = CALL_EXPR_ARG (arg0, 0);
7920 if (tree_expr_nonnegative_p (arg))
7922 const REAL_VALUE_TYPE dconstroot
7923 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7924 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7925 build_real (type, dconstroot));
7926 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7930 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7931 if (fcode == BUILT_IN_POW
7932 || fcode == BUILT_IN_POWF
7933 || fcode == BUILT_IN_POWL)
7935 tree arg00 = CALL_EXPR_ARG (arg0, 0);
7936 if (tree_expr_nonnegative_p (arg00))
7938 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7939 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7940 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7948 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
7949 Return NULL_TREE if no simplification can be made. */
7951 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7952 tree arg0, tree arg1, tree type)
7954 if (!validate_arg (arg0, REAL_TYPE)
7955 || !validate_arg (arg1, INTEGER_TYPE))
7958 /* Optimize pow(1.0,y) = 1.0. */
7959 if (real_onep (arg0))
7960 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7962 if (host_integerp (arg1, 0))
7964 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7966 /* Evaluate powi at compile-time. */
7967 if (TREE_CODE (arg0) == REAL_CST
7968 && !TREE_OVERFLOW (arg0))
7971 x = TREE_REAL_CST (arg0);
7972 real_powi (&x, TYPE_MODE (type), &x, c);
7973 return build_real (type, x);
7976 /* Optimize pow(x,0) = 1.0. */
7978 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7981 /* Optimize pow(x,1) = x. */
7985 /* Optimize pow(x,-1) = 1.0/x. */
7987 return fold_build2_loc (loc, RDIV_EXPR, type,
7988 build_real (type, dconst1), arg0);
7994 /* A subroutine of fold_builtin to fold the various exponent
7995 functions. Return NULL_TREE if no simplification can be made.
7996 FUNC is the corresponding MPFR exponent function. */
7999 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8000 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8002 if (validate_arg (arg, REAL_TYPE))
8004 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8007 /* Calculate the result when the argument is a constant. */
8008 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8011 /* Optimize expN(logN(x)) = x. */
8012 if (flag_unsafe_math_optimizations)
8014 const enum built_in_function fcode = builtin_mathfn_code (arg);
8016 if ((func == mpfr_exp
8017 && (fcode == BUILT_IN_LOG
8018 || fcode == BUILT_IN_LOGF
8019 || fcode == BUILT_IN_LOGL))
8020 || (func == mpfr_exp2
8021 && (fcode == BUILT_IN_LOG2
8022 || fcode == BUILT_IN_LOG2F
8023 || fcode == BUILT_IN_LOG2L))
8024 || (func == mpfr_exp10
8025 && (fcode == BUILT_IN_LOG10
8026 || fcode == BUILT_IN_LOG10F
8027 || fcode == BUILT_IN_LOG10L)))
8028 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8035 /* Return true if VAR is a VAR_DECL or a component thereof. */
8038 var_decl_component_p (tree var)
8041 while (handled_component_p (inner))
8042 inner = TREE_OPERAND (inner, 0);
8043 return SSA_VAR_P (inner);
8046 /* Fold function call to builtin memset. Return
8047 NULL_TREE if no simplification can be made. */
8050 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8051 tree type, bool ignore)
8053 tree var, ret, etype;
8054 unsigned HOST_WIDE_INT length, cval;
8056 if (! validate_arg (dest, POINTER_TYPE)
8057 || ! validate_arg (c, INTEGER_TYPE)
8058 || ! validate_arg (len, INTEGER_TYPE))
8061 if (! host_integerp (len, 1))
8064 /* If the LEN parameter is zero, return DEST. */
8065 if (integer_zerop (len))
8066 return omit_one_operand_loc (loc, type, dest, c);
8068 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8073 if (TREE_CODE (var) != ADDR_EXPR)
8076 var = TREE_OPERAND (var, 0);
8077 if (TREE_THIS_VOLATILE (var))
8080 etype = TREE_TYPE (var);
8081 if (TREE_CODE (etype) == ARRAY_TYPE)
8082 etype = TREE_TYPE (etype);
8084 if (!INTEGRAL_TYPE_P (etype)
8085 && !POINTER_TYPE_P (etype))
8088 if (! var_decl_component_p (var))
8091 length = tree_low_cst (len, 1);
8092 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8093 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8097 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8100 if (integer_zerop (c))
8104 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8107 cval = tree_low_cst (c, 1);
8111 cval |= (cval << 31) << 1;
8114 ret = build_int_cst_type (etype, cval);
8115 var = build_fold_indirect_ref_loc (loc,
8116 fold_convert_loc (loc,
8117 build_pointer_type (etype),
8119 ret = build2 (MODIFY_EXPR, etype, var, ret);
8123 return omit_one_operand_loc (loc, type, dest, ret);
8126 /* Fold function call to builtin memset. Return
8127 NULL_TREE if no simplification can be made. */
8130 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8132 if (! validate_arg (dest, POINTER_TYPE)
8133 || ! validate_arg (size, INTEGER_TYPE))
8139 /* New argument list transforming bzero(ptr x, int y) to
8140 memset(ptr x, int 0, size_t y). This is done this way
8141 so that if it isn't expanded inline, we fallback to
8142 calling bzero instead of memset. */
8144 return fold_builtin_memset (loc, dest, integer_zero_node,
8145 fold_convert_loc (loc, sizetype, size),
8146 void_type_node, ignore);
8149 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8150 NULL_TREE if no simplification can be made.
8151 If ENDP is 0, return DEST (like memcpy).
8152 If ENDP is 1, return DEST+LEN (like mempcpy).
8153 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8154 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8158 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8159 tree len, tree type, bool ignore, int endp)
8161 tree destvar, srcvar, expr;
8163 if (! validate_arg (dest, POINTER_TYPE)
8164 || ! validate_arg (src, POINTER_TYPE)
8165 || ! validate_arg (len, INTEGER_TYPE))
8168 /* If the LEN parameter is zero, return DEST. */
8169 if (integer_zerop (len))
8170 return omit_one_operand_loc (loc, type, dest, src);
8172 /* If SRC and DEST are the same (and not volatile), return
8173 DEST{,+LEN,+LEN-1}. */
8174 if (operand_equal_p (src, dest, 0))
8178 tree srctype, desttype;
8179 int src_align, dest_align;
8183 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8184 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8186 /* Both DEST and SRC must be pointer types.
8187 ??? This is what old code did. Is the testing for pointer types
8190 If either SRC is readonly or length is 1, we can use memcpy. */
8191 if (!dest_align || !src_align)
8193 if (readonly_data_expr (src)
8194 || (host_integerp (len, 1)
8195 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8196 >= tree_low_cst (len, 1))))
8198 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8201 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8204 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8205 srcvar = build_fold_indirect_ref_loc (loc, src);
8206 destvar = build_fold_indirect_ref_loc (loc, dest);
8208 && !TREE_THIS_VOLATILE (srcvar)
8210 && !TREE_THIS_VOLATILE (destvar))
8212 tree src_base, dest_base, fn;
8213 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8214 HOST_WIDE_INT size = -1;
8215 HOST_WIDE_INT maxsize = -1;
8218 if (handled_component_p (src_base))
8219 src_base = get_ref_base_and_extent (src_base, &src_offset,
8221 dest_base = destvar;
8222 if (handled_component_p (dest_base))
8223 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8225 if (host_integerp (len, 1))
8227 maxsize = tree_low_cst (len, 1);
8229 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8232 maxsize *= BITS_PER_UNIT;
8236 if (SSA_VAR_P (src_base)
8237 && SSA_VAR_P (dest_base))
8239 if (operand_equal_p (src_base, dest_base, 0)
8240 && ranges_overlap_p (src_offset, maxsize,
8241 dest_offset, maxsize))
8244 else if (TREE_CODE (src_base) == INDIRECT_REF
8245 && TREE_CODE (dest_base) == INDIRECT_REF)
8247 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8248 TREE_OPERAND (dest_base, 0), 0)
8249 || ranges_overlap_p (src_offset, maxsize,
8250 dest_offset, maxsize))
8256 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8259 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8264 if (!host_integerp (len, 0))
8267 This logic lose for arguments like (type *)malloc (sizeof (type)),
8268 since we strip the casts of up to VOID return value from malloc.
8269 Perhaps we ought to inherit type from non-VOID argument here? */
8272 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8273 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8275 tree tem = TREE_OPERAND (src, 0);
8277 if (tem != TREE_OPERAND (src, 0))
8278 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8280 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8282 tree tem = TREE_OPERAND (dest, 0);
8284 if (tem != TREE_OPERAND (dest, 0))
8285 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8287 srctype = TREE_TYPE (TREE_TYPE (src));
8289 && TREE_CODE (srctype) == ARRAY_TYPE
8290 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8292 srctype = TREE_TYPE (srctype);
8294 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8296 desttype = TREE_TYPE (TREE_TYPE (dest));
8298 && TREE_CODE (desttype) == ARRAY_TYPE
8299 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8301 desttype = TREE_TYPE (desttype);
8303 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8305 if (!srctype || !desttype
8306 || !TYPE_SIZE_UNIT (srctype)
8307 || !TYPE_SIZE_UNIT (desttype)
8308 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8309 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8310 || TYPE_VOLATILE (srctype)
8311 || TYPE_VOLATILE (desttype))
8314 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8315 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8316 if (dest_align < (int) TYPE_ALIGN (desttype)
8317 || src_align < (int) TYPE_ALIGN (srctype))
8321 dest = builtin_save_expr (dest);
8324 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8326 srcvar = build_fold_indirect_ref_loc (loc, src);
8327 if (TREE_THIS_VOLATILE (srcvar))
8329 else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
8331 /* With memcpy, it is possible to bypass aliasing rules, so without
8332 this check i.e. execute/20060930-2.c would be misoptimized,
8333 because it use conflicting alias set to hold argument for the
8334 memcpy call. This check is probably unnecessary with
8335 -fno-strict-aliasing. Similarly for destvar. See also
8337 else if (!var_decl_component_p (srcvar))
8341 destvar = NULL_TREE;
8342 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8344 destvar = build_fold_indirect_ref_loc (loc, dest);
8345 if (TREE_THIS_VOLATILE (destvar))
8347 else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
8348 destvar = NULL_TREE;
8349 else if (!var_decl_component_p (destvar))
8350 destvar = NULL_TREE;
8353 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8356 if (srcvar == NULL_TREE)
8359 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8362 srctype = build_qualified_type (desttype, 0);
8363 if (src_align < (int) TYPE_ALIGN (srctype))
8365 if (AGGREGATE_TYPE_P (srctype)
8366 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8369 srctype = build_variant_type_copy (srctype);
8370 TYPE_ALIGN (srctype) = src_align;
8371 TYPE_USER_ALIGN (srctype) = 1;
8372 TYPE_PACKED (srctype) = 1;
8374 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8375 src = fold_convert_loc (loc, srcptype, src);
8376 srcvar = build_fold_indirect_ref_loc (loc, src);
8378 else if (destvar == NULL_TREE)
8381 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8384 desttype = build_qualified_type (srctype, 0);
8385 if (dest_align < (int) TYPE_ALIGN (desttype))
8387 if (AGGREGATE_TYPE_P (desttype)
8388 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8391 desttype = build_variant_type_copy (desttype);
8392 TYPE_ALIGN (desttype) = dest_align;
8393 TYPE_USER_ALIGN (desttype) = 1;
8394 TYPE_PACKED (desttype) = 1;
8396 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8397 dest = fold_convert_loc (loc, destptype, dest);
8398 destvar = build_fold_indirect_ref_loc (loc, dest);
8401 if (srctype == desttype
8402 || (gimple_in_ssa_p (cfun)
8403 && useless_type_conversion_p (desttype, srctype)))
8405 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8406 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8407 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8408 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8409 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
8411 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8412 TREE_TYPE (destvar), srcvar);
8413 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8419 if (endp == 0 || endp == 3)
8420 return omit_one_operand_loc (loc, type, dest, expr);
8426 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8429 len = fold_convert_loc (loc, sizetype, len);
8430 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8431 dest = fold_convert_loc (loc, type, dest);
8433 dest = omit_one_operand_loc (loc, type, dest, expr);
8437 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8438 If LEN is not NULL, it represents the length of the string to be
8439 copied. Return NULL_TREE if no simplification can be made. */
8442 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8446 if (!validate_arg (dest, POINTER_TYPE)
8447 || !validate_arg (src, POINTER_TYPE))
8450 /* If SRC and DEST are the same (and not volatile), return DEST. */
8451 if (operand_equal_p (src, dest, 0))
8452 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8454 if (optimize_function_for_size_p (cfun))
8457 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8463 len = c_strlen (src, 1);
8464 if (! len || TREE_SIDE_EFFECTS (len))
8468 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8469 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8470 build_call_expr_loc (loc, fn, 3, dest, src, len));
8473 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8474 Return NULL_TREE if no simplification can be made. */
8477 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8479 tree fn, len, lenp1, call, type;
8481 if (!validate_arg (dest, POINTER_TYPE)
8482 || !validate_arg (src, POINTER_TYPE))
8485 len = c_strlen (src, 1);
8487 || TREE_CODE (len) != INTEGER_CST)
8490 if (optimize_function_for_size_p (cfun)
8491 /* If length is zero it's small enough. */
8492 && !integer_zerop (len))
8495 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8499 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8500 /* We use dest twice in building our expression. Save it from
8501 multiple expansions. */
8502 dest = builtin_save_expr (dest);
8503 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8505 type = TREE_TYPE (TREE_TYPE (fndecl));
8506 len = fold_convert_loc (loc, sizetype, len);
8507 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8508 dest = fold_convert_loc (loc, type, dest);
8509 dest = omit_one_operand_loc (loc, type, dest, call);
8513 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8514 If SLEN is not NULL, it represents the length of the source string.
8515 Return NULL_TREE if no simplification can be made. */
8518 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8519 tree src, tree len, tree slen)
8523 if (!validate_arg (dest, POINTER_TYPE)
8524 || !validate_arg (src, POINTER_TYPE)
8525 || !validate_arg (len, INTEGER_TYPE))
8528 /* If the LEN parameter is zero, return DEST. */
8529 if (integer_zerop (len))
8530 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8532 /* We can't compare slen with len as constants below if len is not a
8534 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8538 slen = c_strlen (src, 1);
8540 /* Now, we must be passed a constant src ptr parameter. */
8541 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8544 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8546 /* We do not support simplification of this case, though we do
8547 support it when expanding trees into RTL. */
8548 /* FIXME: generate a call to __builtin_memset. */
8549 if (tree_int_cst_lt (slen, len))
8552 /* OK transform into builtin memcpy. */
8553 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8556 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8557 build_call_expr_loc (loc, fn, 3, dest, src, len));
8560 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8561 arguments to the call, and TYPE is its return type.
8562 Return NULL_TREE if no simplification can be made. */
8565 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8567 if (!validate_arg (arg1, POINTER_TYPE)
8568 || !validate_arg (arg2, INTEGER_TYPE)
8569 || !validate_arg (len, INTEGER_TYPE))
8575 if (TREE_CODE (arg2) != INTEGER_CST
8576 || !host_integerp (len, 1))
8579 p1 = c_getstr (arg1);
8580 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8586 if (target_char_cast (arg2, &c))
8589 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8592 return build_int_cst (TREE_TYPE (arg1), 0);
8594 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8596 return fold_convert_loc (loc, type, tem);
8602 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8603 Return NULL_TREE if no simplification can be made. */
8606 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8608 const char *p1, *p2;
8610 if (!validate_arg (arg1, POINTER_TYPE)
8611 || !validate_arg (arg2, POINTER_TYPE)
8612 || !validate_arg (len, INTEGER_TYPE))
8615 /* If the LEN parameter is zero, return zero. */
8616 if (integer_zerop (len))
8617 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8620 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8621 if (operand_equal_p (arg1, arg2, 0))
8622 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8624 p1 = c_getstr (arg1);
8625 p2 = c_getstr (arg2);
8627 /* If all arguments are constant, and the value of len is not greater
8628 than the lengths of arg1 and arg2, evaluate at compile-time. */
8629 if (host_integerp (len, 1) && p1 && p2
8630 && compare_tree_int (len, strlen (p1) + 1) <= 0
8631 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8633 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8636 return integer_one_node;
8638 return integer_minus_one_node;
8640 return integer_zero_node;
8643 /* If len parameter is one, return an expression corresponding to
8644 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8645 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8647 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8648 tree cst_uchar_ptr_node
8649 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8652 = fold_convert_loc (loc, integer_type_node,
8653 build1 (INDIRECT_REF, cst_uchar_node,
8654 fold_convert_loc (loc,
8658 = fold_convert_loc (loc, integer_type_node,
8659 build1 (INDIRECT_REF, cst_uchar_node,
8660 fold_convert_loc (loc,
8663 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8669 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8670 Return NULL_TREE if no simplification can be made. */
8673 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8675 const char *p1, *p2;
8677 if (!validate_arg (arg1, POINTER_TYPE)
8678 || !validate_arg (arg2, POINTER_TYPE))
8681 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8682 if (operand_equal_p (arg1, arg2, 0))
8683 return integer_zero_node;
8685 p1 = c_getstr (arg1);
8686 p2 = c_getstr (arg2);
8690 const int i = strcmp (p1, p2);
8692 return integer_minus_one_node;
8694 return integer_one_node;
8696 return integer_zero_node;
8699 /* If the second arg is "", return *(const unsigned char*)arg1. */
8700 if (p2 && *p2 == '\0')
8702 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8703 tree cst_uchar_ptr_node
8704 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8706 return fold_convert_loc (loc, integer_type_node,
8707 build1 (INDIRECT_REF, cst_uchar_node,
8708 fold_convert_loc (loc,
8713 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8714 if (p1 && *p1 == '\0')
8716 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8717 tree cst_uchar_ptr_node
8718 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8721 = fold_convert_loc (loc, integer_type_node,
8722 build1 (INDIRECT_REF, cst_uchar_node,
8723 fold_convert_loc (loc,
8726 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8732 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8733 Return NULL_TREE if no simplification can be made. */
8736 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8738 const char *p1, *p2;
8740 if (!validate_arg (arg1, POINTER_TYPE)
8741 || !validate_arg (arg2, POINTER_TYPE)
8742 || !validate_arg (len, INTEGER_TYPE))
8745 /* If the LEN parameter is zero, return zero. */
8746 if (integer_zerop (len))
8747 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8750 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8751 if (operand_equal_p (arg1, arg2, 0))
8752 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8754 p1 = c_getstr (arg1);
8755 p2 = c_getstr (arg2);
8757 if (host_integerp (len, 1) && p1 && p2)
8759 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8761 return integer_one_node;
8763 return integer_minus_one_node;
8765 return integer_zero_node;
8768 /* If the second arg is "", and the length is greater than zero,
8769 return *(const unsigned char*)arg1. */
8770 if (p2 && *p2 == '\0'
8771 && TREE_CODE (len) == INTEGER_CST
8772 && tree_int_cst_sgn (len) == 1)
8774 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8775 tree cst_uchar_ptr_node
8776 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8778 return fold_convert_loc (loc, integer_type_node,
8779 build1 (INDIRECT_REF, cst_uchar_node,
8780 fold_convert_loc (loc,
8785 /* If the first arg is "", and the length is greater than zero,
8786 return -*(const unsigned char*)arg2. */
8787 if (p1 && *p1 == '\0'
8788 && TREE_CODE (len) == INTEGER_CST
8789 && tree_int_cst_sgn (len) == 1)
8791 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8792 tree cst_uchar_ptr_node
8793 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8795 tree temp = fold_convert_loc (loc, integer_type_node,
8796 build1 (INDIRECT_REF, cst_uchar_node,
8797 fold_convert_loc (loc,
8800 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8803 /* If len parameter is one, return an expression corresponding to
8804 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8805 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8807 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8808 tree cst_uchar_ptr_node
8809 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8811 tree ind1 = fold_convert_loc (loc, integer_type_node,
8812 build1 (INDIRECT_REF, cst_uchar_node,
8813 fold_convert_loc (loc,
8816 tree ind2 = fold_convert_loc (loc, integer_type_node,
8817 build1 (INDIRECT_REF, cst_uchar_node,
8818 fold_convert_loc (loc,
8821 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8827 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8828 ARG. Return NULL_TREE if no simplification can be made. */
8831 fold_builtin_signbit (location_t loc, tree arg, tree type)
8835 if (!validate_arg (arg, REAL_TYPE))
8838 /* If ARG is a compile-time constant, determine the result. */
8839 if (TREE_CODE (arg) == REAL_CST
8840 && !TREE_OVERFLOW (arg))
8844 c = TREE_REAL_CST (arg);
8845 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8846 return fold_convert_loc (loc, type, temp);
8849 /* If ARG is non-negative, the result is always zero. */
8850 if (tree_expr_nonnegative_p (arg))
8851 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8853 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8854 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8855 return fold_build2_loc (loc, LT_EXPR, type, arg,
8856 build_real (TREE_TYPE (arg), dconst0));
8861 /* Fold function call to builtin copysign, copysignf or copysignl with
8862 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8866 fold_builtin_copysign (location_t loc, tree fndecl,
8867 tree arg1, tree arg2, tree type)
8871 if (!validate_arg (arg1, REAL_TYPE)
8872 || !validate_arg (arg2, REAL_TYPE))
8875 /* copysign(X,X) is X. */
8876 if (operand_equal_p (arg1, arg2, 0))
8877 return fold_convert_loc (loc, type, arg1);
8879 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8880 if (TREE_CODE (arg1) == REAL_CST
8881 && TREE_CODE (arg2) == REAL_CST
8882 && !TREE_OVERFLOW (arg1)
8883 && !TREE_OVERFLOW (arg2))
8885 REAL_VALUE_TYPE c1, c2;
8887 c1 = TREE_REAL_CST (arg1);
8888 c2 = TREE_REAL_CST (arg2);
8889 /* c1.sign := c2.sign. */
8890 real_copysign (&c1, &c2);
8891 return build_real (type, c1);
8894 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8895 Remember to evaluate Y for side-effects. */
8896 if (tree_expr_nonnegative_p (arg2))
8897 return omit_one_operand_loc (loc, type,
8898 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8901 /* Strip sign changing operations for the first argument. */
8902 tem = fold_strip_sign_ops (arg1);
8904 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8909 /* Fold a call to builtin isascii with argument ARG. */
8912 fold_builtin_isascii (location_t loc, tree arg)
8914 if (!validate_arg (arg, INTEGER_TYPE))
8918 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8919 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8920 build_int_cst (NULL_TREE,
8921 ~ (unsigned HOST_WIDE_INT) 0x7f));
8922 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8923 arg, integer_zero_node);
8927 /* Fold a call to builtin toascii with argument ARG. */
8930 fold_builtin_toascii (location_t loc, tree arg)
8932 if (!validate_arg (arg, INTEGER_TYPE))
8935 /* Transform toascii(c) -> (c & 0x7f). */
8936 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8937 build_int_cst (NULL_TREE, 0x7f));
8940 /* Fold a call to builtin isdigit with argument ARG. */
8943 fold_builtin_isdigit (location_t loc, tree arg)
8945 if (!validate_arg (arg, INTEGER_TYPE))
8949 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8950 /* According to the C standard, isdigit is unaffected by locale.
8951 However, it definitely is affected by the target character set. */
8952 unsigned HOST_WIDE_INT target_digit0
8953 = lang_hooks.to_target_charset ('0');
8955 if (target_digit0 == 0)
8958 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8959 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8960 build_int_cst (unsigned_type_node, target_digit0));
8961 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8962 build_int_cst (unsigned_type_node, 9));
8966 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8969 fold_builtin_fabs (location_t loc, tree arg, tree type)
8971 if (!validate_arg (arg, REAL_TYPE))
8974 arg = fold_convert_loc (loc, type, arg);
8975 if (TREE_CODE (arg) == REAL_CST)
8976 return fold_abs_const (arg, type);
8977 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8980 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8983 fold_builtin_abs (location_t loc, tree arg, tree type)
8985 if (!validate_arg (arg, INTEGER_TYPE))
8988 arg = fold_convert_loc (loc, type, arg);
8989 if (TREE_CODE (arg) == INTEGER_CST)
8990 return fold_abs_const (arg, type);
8991 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8994 /* Fold a call to builtin fmin or fmax. */
8997 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8998 tree type, bool max)
9000 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9002 /* Calculate the result when the argument is a constant. */
9003 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9008 /* If either argument is NaN, return the other one. Avoid the
9009 transformation if we get (and honor) a signalling NaN. Using
9010 omit_one_operand() ensures we create a non-lvalue. */
9011 if (TREE_CODE (arg0) == REAL_CST
9012 && real_isnan (&TREE_REAL_CST (arg0))
9013 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9014 || ! TREE_REAL_CST (arg0).signalling))
9015 return omit_one_operand_loc (loc, type, arg1, arg0);
9016 if (TREE_CODE (arg1) == REAL_CST
9017 && real_isnan (&TREE_REAL_CST (arg1))
9018 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9019 || ! TREE_REAL_CST (arg1).signalling))
9020 return omit_one_operand_loc (loc, type, arg0, arg1);
9022 /* Transform fmin/fmax(x,x) -> x. */
9023 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9024 return omit_one_operand_loc (loc, type, arg0, arg1);
9026 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9027 functions to return the numeric arg if the other one is NaN.
9028 These tree codes don't honor that, so only transform if
9029 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9030 handled, so we don't have to worry about it either. */
9031 if (flag_finite_math_only)
9032 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9033 fold_convert_loc (loc, type, arg0),
9034 fold_convert_loc (loc, type, arg1));
9039 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9042 fold_builtin_carg (location_t loc, tree arg, tree type)
9044 if (validate_arg (arg, COMPLEX_TYPE)
9045 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9047 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9051 tree new_arg = builtin_save_expr (arg);
9052 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9053 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9054 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9061 /* Fold a call to builtin logb/ilogb. */
9064 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9066 if (! validate_arg (arg, REAL_TYPE))
9071 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9073 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9079 /* If arg is Inf or NaN and we're logb, return it. */
9080 if (TREE_CODE (rettype) == REAL_TYPE)
9081 return fold_convert_loc (loc, rettype, arg);
9082 /* Fall through... */
9084 /* Zero may set errno and/or raise an exception for logb, also
9085 for ilogb we don't know FP_ILOGB0. */
9088 /* For normal numbers, proceed iff radix == 2. In GCC,
9089 normalized significands are in the range [0.5, 1.0). We
9090 want the exponent as if they were [1.0, 2.0) so get the
9091 exponent and subtract 1. */
9092 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9093 return fold_convert_loc (loc, rettype,
9094 build_int_cst (NULL_TREE,
9095 REAL_EXP (value)-1));
9103 /* Fold a call to builtin significand, if radix == 2. */
9106 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9108 if (! validate_arg (arg, REAL_TYPE))
9113 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9115 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9122 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9123 return fold_convert_loc (loc, rettype, arg);
9125 /* For normal numbers, proceed iff radix == 2. */
9126 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9128 REAL_VALUE_TYPE result = *value;
9129 /* In GCC, normalized significands are in the range [0.5,
9130 1.0). We want them to be [1.0, 2.0) so set the
9132 SET_REAL_EXP (&result, 1);
9133 return build_real (rettype, result);
9142 /* Fold a call to builtin frexp, we can assume the base is 2. */
9145 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9147 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9152 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9155 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9157 /* Proceed if a valid pointer type was passed in. */
9158 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9160 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9166 /* For +-0, return (*exp = 0, +-0). */
9167 exp = integer_zero_node;
9172 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9173 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9176 /* Since the frexp function always expects base 2, and in
9177 GCC normalized significands are already in the range
9178 [0.5, 1.0), we have exactly what frexp wants. */
9179 REAL_VALUE_TYPE frac_rvt = *value;
9180 SET_REAL_EXP (&frac_rvt, 0);
9181 frac = build_real (rettype, frac_rvt);
9182 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9189 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9190 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9191 TREE_SIDE_EFFECTS (arg1) = 1;
9192 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9198 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9199 then we can assume the base is two. If it's false, then we have to
9200 check the mode of the TYPE parameter in certain cases. */
9203 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9204 tree type, bool ldexp)
9206 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9211 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9212 if (real_zerop (arg0) || integer_zerop (arg1)
9213 || (TREE_CODE (arg0) == REAL_CST
9214 && !real_isfinite (&TREE_REAL_CST (arg0))))
9215 return omit_one_operand_loc (loc, type, arg0, arg1);
9217 /* If both arguments are constant, then try to evaluate it. */
9218 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9219 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9220 && host_integerp (arg1, 0))
9222 /* Bound the maximum adjustment to twice the range of the
9223 mode's valid exponents. Use abs to ensure the range is
9224 positive as a sanity check. */
9225 const long max_exp_adj = 2 *
9226 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9227 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9229 /* Get the user-requested adjustment. */
9230 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9232 /* The requested adjustment must be inside this range. This
9233 is a preliminary cap to avoid things like overflow, we
9234 may still fail to compute the result for other reasons. */
9235 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9237 REAL_VALUE_TYPE initial_result;
9239 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9241 /* Ensure we didn't overflow. */
9242 if (! real_isinf (&initial_result))
9244 const REAL_VALUE_TYPE trunc_result
9245 = real_value_truncate (TYPE_MODE (type), initial_result);
9247 /* Only proceed if the target mode can hold the
9249 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9250 return build_real (type, trunc_result);
9259 /* Fold a call to builtin modf. */
9262 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9264 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9269 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9272 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9274 /* Proceed if a valid pointer type was passed in. */
9275 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9277 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9278 REAL_VALUE_TYPE trunc, frac;
9284 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9285 trunc = frac = *value;
9288 /* For +-Inf, return (*arg1 = arg0, +-0). */
9290 frac.sign = value->sign;
9294 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9295 real_trunc (&trunc, VOIDmode, value);
9296 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9297 /* If the original number was negative and already
9298 integral, then the fractional part is -0.0. */
9299 if (value->sign && frac.cl == rvc_zero)
9300 frac.sign = value->sign;
9304 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9305 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9306 build_real (rettype, trunc));
9307 TREE_SIDE_EFFECTS (arg1) = 1;
9308 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9309 build_real (rettype, frac));
9315 /* Given a location LOC, an interclass builtin function decl FNDECL
9316 and its single argument ARG, return an folded expression computing
9317 the same, or NULL_TREE if we either couldn't or didn't want to fold
9318 (the latter happen if there's an RTL instruction available). */
9321 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9323 enum machine_mode mode;
9325 if (!validate_arg (arg, REAL_TYPE))
9328 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9331 mode = TYPE_MODE (TREE_TYPE (arg));
9333 /* If there is no optab, try generic code. */
9334 switch (DECL_FUNCTION_CODE (fndecl))
9338 CASE_FLT_FN (BUILT_IN_ISINF):
9340 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9341 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9342 tree const type = TREE_TYPE (arg);
9346 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9347 real_from_string (&r, buf);
9348 result = build_call_expr (isgr_fn, 2,
9349 fold_build1_loc (loc, ABS_EXPR, type, arg),
9350 build_real (type, r));
9353 CASE_FLT_FN (BUILT_IN_FINITE):
9354 case BUILT_IN_ISFINITE:
9356 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9357 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9358 tree const type = TREE_TYPE (arg);
9362 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9363 real_from_string (&r, buf);
9364 result = build_call_expr (isle_fn, 2,
9365 fold_build1_loc (loc, ABS_EXPR, type, arg),
9366 build_real (type, r));
9367 /*result = fold_build2_loc (loc, UNGT_EXPR,
9368 TREE_TYPE (TREE_TYPE (fndecl)),
9369 fold_build1_loc (loc, ABS_EXPR, type, arg),
9370 build_real (type, r));
9371 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9372 TREE_TYPE (TREE_TYPE (fndecl)),
9376 case BUILT_IN_ISNORMAL:
9378 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9379 islessequal(fabs(x),DBL_MAX). */
9380 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9381 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9382 tree const type = TREE_TYPE (arg);
9383 REAL_VALUE_TYPE rmax, rmin;
9386 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9387 real_from_string (&rmax, buf);
9388 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9389 real_from_string (&rmin, buf);
9390 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9391 result = build_call_expr (isle_fn, 2, arg,
9392 build_real (type, rmax));
9393 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9394 build_call_expr (isge_fn, 2, arg,
9395 build_real (type, rmin)));
9405 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9406 ARG is the argument for the call. */
9409 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9411 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9414 if (!validate_arg (arg, REAL_TYPE))
9417 switch (builtin_index)
9419 case BUILT_IN_ISINF:
9420 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9421 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9423 if (TREE_CODE (arg) == REAL_CST)
9425 r = TREE_REAL_CST (arg);
9426 if (real_isinf (&r))
9427 return real_compare (GT_EXPR, &r, &dconst0)
9428 ? integer_one_node : integer_minus_one_node;
9430 return integer_zero_node;
9435 case BUILT_IN_ISINF_SIGN:
9437 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9438 /* In a boolean context, GCC will fold the inner COND_EXPR to
9439 1. So e.g. "if (isinf_sign(x))" would be folded to just
9440 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9441 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9442 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9443 tree tmp = NULL_TREE;
9445 arg = builtin_save_expr (arg);
9447 if (signbit_fn && isinf_fn)
9449 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9450 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9452 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9453 signbit_call, integer_zero_node);
9454 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9455 isinf_call, integer_zero_node);
9457 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9458 integer_minus_one_node, integer_one_node);
9459 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9467 case BUILT_IN_ISFINITE:
9468 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9469 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9470 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9472 if (TREE_CODE (arg) == REAL_CST)
9474 r = TREE_REAL_CST (arg);
9475 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9480 case BUILT_IN_ISNAN:
9481 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9482 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9484 if (TREE_CODE (arg) == REAL_CST)
9486 r = TREE_REAL_CST (arg);
9487 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9490 arg = builtin_save_expr (arg);
9491 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9498 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9499 This builtin will generate code to return the appropriate floating
9500 point classification depending on the value of the floating point
9501 number passed in. The possible return values must be supplied as
9502 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9503 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9504 one floating point argument which is "type generic". */
9507 fold_builtin_fpclassify (location_t loc, tree exp)
9509 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9510 arg, type, res, tmp;
9511 enum machine_mode mode;
9515 /* Verify the required arguments in the original call. */
9516 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9517 INTEGER_TYPE, INTEGER_TYPE,
9518 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9521 fp_nan = CALL_EXPR_ARG (exp, 0);
9522 fp_infinite = CALL_EXPR_ARG (exp, 1);
9523 fp_normal = CALL_EXPR_ARG (exp, 2);
9524 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9525 fp_zero = CALL_EXPR_ARG (exp, 4);
9526 arg = CALL_EXPR_ARG (exp, 5);
9527 type = TREE_TYPE (arg);
9528 mode = TYPE_MODE (type);
9529 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9533 (fabs(x) == Inf ? FP_INFINITE :
9534 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9535 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9537 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9538 build_real (type, dconst0));
9539 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9540 tmp, fp_zero, fp_subnormal);
9542 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9543 real_from_string (&r, buf);
9544 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9545 arg, build_real (type, r));
9546 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9548 if (HONOR_INFINITIES (mode))
9551 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9552 build_real (type, r));
9553 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9557 if (HONOR_NANS (mode))
9559 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9560 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9566 /* Fold a call to an unordered comparison function such as
9567 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9568 being called and ARG0 and ARG1 are the arguments for the call.
9569 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9570 the opposite of the desired result. UNORDERED_CODE is used
9571 for modes that can hold NaNs and ORDERED_CODE is used for
9575 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9576 enum tree_code unordered_code,
9577 enum tree_code ordered_code)
9579 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9580 enum tree_code code;
9582 enum tree_code code0, code1;
9583 tree cmp_type = NULL_TREE;
9585 type0 = TREE_TYPE (arg0);
9586 type1 = TREE_TYPE (arg1);
9588 code0 = TREE_CODE (type0);
9589 code1 = TREE_CODE (type1);
9591 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9592 /* Choose the wider of two real types. */
9593 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9595 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9597 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9600 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9601 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9603 if (unordered_code == UNORDERED_EXPR)
9605 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9606 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9607 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9610 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9612 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9613 fold_build2_loc (loc, code, type, arg0, arg1));
9616 /* Fold a call to built-in function FNDECL with 0 arguments.
9617 IGNORE is true if the result of the function call is ignored. This
9618 function returns NULL_TREE if no simplification was possible. */
9621 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9623 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9624 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9627 CASE_FLT_FN (BUILT_IN_INF):
9628 case BUILT_IN_INFD32:
9629 case BUILT_IN_INFD64:
9630 case BUILT_IN_INFD128:
9631 return fold_builtin_inf (loc, type, true);
9633 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9634 return fold_builtin_inf (loc, type, false);
9636 case BUILT_IN_CLASSIFY_TYPE:
9637 return fold_builtin_classify_type (NULL_TREE);
9645 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9646 IGNORE is true if the result of the function call is ignored. This
9647 function returns NULL_TREE if no simplification was possible. */
9650 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9652 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9653 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9657 case BUILT_IN_CONSTANT_P:
9659 tree val = fold_builtin_constant_p (arg0);
9661 /* Gimplification will pull the CALL_EXPR for the builtin out of
9662 an if condition. When not optimizing, we'll not CSE it back.
9663 To avoid link error types of regressions, return false now. */
9664 if (!val && !optimize)
9665 val = integer_zero_node;
9670 case BUILT_IN_CLASSIFY_TYPE:
9671 return fold_builtin_classify_type (arg0);
9673 case BUILT_IN_STRLEN:
9674 return fold_builtin_strlen (loc, type, arg0);
9676 CASE_FLT_FN (BUILT_IN_FABS):
9677 return fold_builtin_fabs (loc, arg0, type);
9681 case BUILT_IN_LLABS:
9682 case BUILT_IN_IMAXABS:
9683 return fold_builtin_abs (loc, arg0, type);
9685 CASE_FLT_FN (BUILT_IN_CONJ):
9686 if (validate_arg (arg0, COMPLEX_TYPE)
9687 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9688 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9691 CASE_FLT_FN (BUILT_IN_CREAL):
9692 if (validate_arg (arg0, COMPLEX_TYPE)
9693 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9694 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9697 CASE_FLT_FN (BUILT_IN_CIMAG):
9698 if (validate_arg (arg0, COMPLEX_TYPE)
9699 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9700 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9703 CASE_FLT_FN (BUILT_IN_CCOS):
9704 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9706 CASE_FLT_FN (BUILT_IN_CCOSH):
9707 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9709 CASE_FLT_FN (BUILT_IN_CSIN):
9710 if (validate_arg (arg0, COMPLEX_TYPE)
9711 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9712 return do_mpc_arg1 (arg0, type, mpc_sin);
9715 CASE_FLT_FN (BUILT_IN_CSINH):
9716 if (validate_arg (arg0, COMPLEX_TYPE)
9717 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9718 return do_mpc_arg1 (arg0, type, mpc_sinh);
9721 CASE_FLT_FN (BUILT_IN_CTAN):
9722 if (validate_arg (arg0, COMPLEX_TYPE)
9723 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9724 return do_mpc_arg1 (arg0, type, mpc_tan);
9727 CASE_FLT_FN (BUILT_IN_CTANH):
9728 if (validate_arg (arg0, COMPLEX_TYPE)
9729 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9730 return do_mpc_arg1 (arg0, type, mpc_tanh);
9733 CASE_FLT_FN (BUILT_IN_CLOG):
9734 if (validate_arg (arg0, COMPLEX_TYPE)
9735 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9736 return do_mpc_arg1 (arg0, type, mpc_log);
9739 CASE_FLT_FN (BUILT_IN_CSQRT):
9740 if (validate_arg (arg0, COMPLEX_TYPE)
9741 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9742 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9745 CASE_FLT_FN (BUILT_IN_CASIN):
9746 if (validate_arg (arg0, COMPLEX_TYPE)
9747 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9748 return do_mpc_arg1 (arg0, type, mpc_asin);
9751 CASE_FLT_FN (BUILT_IN_CACOS):
9752 if (validate_arg (arg0, COMPLEX_TYPE)
9753 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9754 return do_mpc_arg1 (arg0, type, mpc_acos);
9757 CASE_FLT_FN (BUILT_IN_CATAN):
9758 if (validate_arg (arg0, COMPLEX_TYPE)
9759 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9760 return do_mpc_arg1 (arg0, type, mpc_atan);
9763 CASE_FLT_FN (BUILT_IN_CASINH):
9764 if (validate_arg (arg0, COMPLEX_TYPE)
9765 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9766 return do_mpc_arg1 (arg0, type, mpc_asinh);
9769 CASE_FLT_FN (BUILT_IN_CACOSH):
9770 if (validate_arg (arg0, COMPLEX_TYPE)
9771 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9772 return do_mpc_arg1 (arg0, type, mpc_acosh);
9775 CASE_FLT_FN (BUILT_IN_CATANH):
9776 if (validate_arg (arg0, COMPLEX_TYPE)
9777 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9778 return do_mpc_arg1 (arg0, type, mpc_atanh);
9781 CASE_FLT_FN (BUILT_IN_CABS):
9782 return fold_builtin_cabs (loc, arg0, type, fndecl);
9784 CASE_FLT_FN (BUILT_IN_CARG):
9785 return fold_builtin_carg (loc, arg0, type);
9787 CASE_FLT_FN (BUILT_IN_SQRT):
9788 return fold_builtin_sqrt (loc, arg0, type);
9790 CASE_FLT_FN (BUILT_IN_CBRT):
9791 return fold_builtin_cbrt (loc, arg0, type);
9793 CASE_FLT_FN (BUILT_IN_ASIN):
9794 if (validate_arg (arg0, REAL_TYPE))
9795 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9796 &dconstm1, &dconst1, true);
9799 CASE_FLT_FN (BUILT_IN_ACOS):
9800 if (validate_arg (arg0, REAL_TYPE))
9801 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9802 &dconstm1, &dconst1, true);
9805 CASE_FLT_FN (BUILT_IN_ATAN):
9806 if (validate_arg (arg0, REAL_TYPE))
9807 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9810 CASE_FLT_FN (BUILT_IN_ASINH):
9811 if (validate_arg (arg0, REAL_TYPE))
9812 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9815 CASE_FLT_FN (BUILT_IN_ACOSH):
9816 if (validate_arg (arg0, REAL_TYPE))
9817 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9818 &dconst1, NULL, true);
9821 CASE_FLT_FN (BUILT_IN_ATANH):
9822 if (validate_arg (arg0, REAL_TYPE))
9823 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9824 &dconstm1, &dconst1, false);
9827 CASE_FLT_FN (BUILT_IN_SIN):
9828 if (validate_arg (arg0, REAL_TYPE))
9829 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9832 CASE_FLT_FN (BUILT_IN_COS):
9833 return fold_builtin_cos (loc, arg0, type, fndecl);
9835 CASE_FLT_FN (BUILT_IN_TAN):
9836 return fold_builtin_tan (arg0, type);
9838 CASE_FLT_FN (BUILT_IN_CEXP):
9839 return fold_builtin_cexp (loc, arg0, type);
9841 CASE_FLT_FN (BUILT_IN_CEXPI):
9842 if (validate_arg (arg0, REAL_TYPE))
9843 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9846 CASE_FLT_FN (BUILT_IN_SINH):
9847 if (validate_arg (arg0, REAL_TYPE))
9848 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9851 CASE_FLT_FN (BUILT_IN_COSH):
9852 return fold_builtin_cosh (loc, arg0, type, fndecl);
9854 CASE_FLT_FN (BUILT_IN_TANH):
9855 if (validate_arg (arg0, REAL_TYPE))
9856 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9859 CASE_FLT_FN (BUILT_IN_ERF):
9860 if (validate_arg (arg0, REAL_TYPE))
9861 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9864 CASE_FLT_FN (BUILT_IN_ERFC):
9865 if (validate_arg (arg0, REAL_TYPE))
9866 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9869 CASE_FLT_FN (BUILT_IN_TGAMMA):
9870 if (validate_arg (arg0, REAL_TYPE))
9871 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9874 CASE_FLT_FN (BUILT_IN_EXP):
9875 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9877 CASE_FLT_FN (BUILT_IN_EXP2):
9878 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9880 CASE_FLT_FN (BUILT_IN_EXP10):
9881 CASE_FLT_FN (BUILT_IN_POW10):
9882 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9884 CASE_FLT_FN (BUILT_IN_EXPM1):
9885 if (validate_arg (arg0, REAL_TYPE))
9886 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9889 CASE_FLT_FN (BUILT_IN_LOG):
9890 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9892 CASE_FLT_FN (BUILT_IN_LOG2):
9893 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9895 CASE_FLT_FN (BUILT_IN_LOG10):
9896 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9898 CASE_FLT_FN (BUILT_IN_LOG1P):
9899 if (validate_arg (arg0, REAL_TYPE))
9900 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9901 &dconstm1, NULL, false);
9904 CASE_FLT_FN (BUILT_IN_J0):
9905 if (validate_arg (arg0, REAL_TYPE))
9906 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9910 CASE_FLT_FN (BUILT_IN_J1):
9911 if (validate_arg (arg0, REAL_TYPE))
9912 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9916 CASE_FLT_FN (BUILT_IN_Y0):
9917 if (validate_arg (arg0, REAL_TYPE))
9918 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9919 &dconst0, NULL, false);
9922 CASE_FLT_FN (BUILT_IN_Y1):
9923 if (validate_arg (arg0, REAL_TYPE))
9924 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9925 &dconst0, NULL, false);
9928 CASE_FLT_FN (BUILT_IN_NAN):
9929 case BUILT_IN_NAND32:
9930 case BUILT_IN_NAND64:
9931 case BUILT_IN_NAND128:
9932 return fold_builtin_nan (arg0, type, true);
9934 CASE_FLT_FN (BUILT_IN_NANS):
9935 return fold_builtin_nan (arg0, type, false);
9937 CASE_FLT_FN (BUILT_IN_FLOOR):
9938 return fold_builtin_floor (loc, fndecl, arg0);
9940 CASE_FLT_FN (BUILT_IN_CEIL):
9941 return fold_builtin_ceil (loc, fndecl, arg0);
9943 CASE_FLT_FN (BUILT_IN_TRUNC):
9944 return fold_builtin_trunc (loc, fndecl, arg0);
9946 CASE_FLT_FN (BUILT_IN_ROUND):
9947 return fold_builtin_round (loc, fndecl, arg0);
9949 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9950 CASE_FLT_FN (BUILT_IN_RINT):
9951 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9953 CASE_FLT_FN (BUILT_IN_LCEIL):
9954 CASE_FLT_FN (BUILT_IN_LLCEIL):
9955 CASE_FLT_FN (BUILT_IN_LFLOOR):
9956 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9957 CASE_FLT_FN (BUILT_IN_LROUND):
9958 CASE_FLT_FN (BUILT_IN_LLROUND):
9959 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9961 CASE_FLT_FN (BUILT_IN_LRINT):
9962 CASE_FLT_FN (BUILT_IN_LLRINT):
9963 return fold_fixed_mathfn (loc, fndecl, arg0);
9965 case BUILT_IN_BSWAP32:
9966 case BUILT_IN_BSWAP64:
9967 return fold_builtin_bswap (fndecl, arg0);
9969 CASE_INT_FN (BUILT_IN_FFS):
9970 CASE_INT_FN (BUILT_IN_CLZ):
9971 CASE_INT_FN (BUILT_IN_CTZ):
9972 CASE_INT_FN (BUILT_IN_POPCOUNT):
9973 CASE_INT_FN (BUILT_IN_PARITY):
9974 return fold_builtin_bitop (fndecl, arg0);
9976 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9977 return fold_builtin_signbit (loc, arg0, type);
9979 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9980 return fold_builtin_significand (loc, arg0, type);
9982 CASE_FLT_FN (BUILT_IN_ILOGB):
9983 CASE_FLT_FN (BUILT_IN_LOGB):
9984 return fold_builtin_logb (loc, arg0, type);
9986 case BUILT_IN_ISASCII:
9987 return fold_builtin_isascii (loc, arg0);
9989 case BUILT_IN_TOASCII:
9990 return fold_builtin_toascii (loc, arg0);
9992 case BUILT_IN_ISDIGIT:
9993 return fold_builtin_isdigit (loc, arg0);
9995 CASE_FLT_FN (BUILT_IN_FINITE):
9996 case BUILT_IN_FINITED32:
9997 case BUILT_IN_FINITED64:
9998 case BUILT_IN_FINITED128:
9999 case BUILT_IN_ISFINITE:
10001 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10004 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10007 CASE_FLT_FN (BUILT_IN_ISINF):
10008 case BUILT_IN_ISINFD32:
10009 case BUILT_IN_ISINFD64:
10010 case BUILT_IN_ISINFD128:
10012 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10015 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10018 case BUILT_IN_ISNORMAL:
10019 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10021 case BUILT_IN_ISINF_SIGN:
10022 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10024 CASE_FLT_FN (BUILT_IN_ISNAN):
10025 case BUILT_IN_ISNAND32:
10026 case BUILT_IN_ISNAND64:
10027 case BUILT_IN_ISNAND128:
10028 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10030 case BUILT_IN_PRINTF:
10031 case BUILT_IN_PRINTF_UNLOCKED:
10032 case BUILT_IN_VPRINTF:
10033 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10043 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10044 IGNORE is true if the result of the function call is ignored. This
10045 function returns NULL_TREE if no simplification was possible. */
10048 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10050 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10051 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10055 CASE_FLT_FN (BUILT_IN_JN):
10056 if (validate_arg (arg0, INTEGER_TYPE)
10057 && validate_arg (arg1, REAL_TYPE))
10058 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10061 CASE_FLT_FN (BUILT_IN_YN):
10062 if (validate_arg (arg0, INTEGER_TYPE)
10063 && validate_arg (arg1, REAL_TYPE))
10064 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10068 CASE_FLT_FN (BUILT_IN_DREM):
10069 CASE_FLT_FN (BUILT_IN_REMAINDER):
10070 if (validate_arg (arg0, REAL_TYPE)
10071 && validate_arg(arg1, REAL_TYPE))
10072 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10075 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10076 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10077 if (validate_arg (arg0, REAL_TYPE)
10078 && validate_arg(arg1, POINTER_TYPE))
10079 return do_mpfr_lgamma_r (arg0, arg1, type);
10082 CASE_FLT_FN (BUILT_IN_ATAN2):
10083 if (validate_arg (arg0, REAL_TYPE)
10084 && validate_arg(arg1, REAL_TYPE))
10085 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10088 CASE_FLT_FN (BUILT_IN_FDIM):
10089 if (validate_arg (arg0, REAL_TYPE)
10090 && validate_arg(arg1, REAL_TYPE))
10091 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10094 CASE_FLT_FN (BUILT_IN_HYPOT):
10095 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10097 CASE_FLT_FN (BUILT_IN_CPOW):
10098 if (validate_arg (arg0, COMPLEX_TYPE)
10099 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10100 && validate_arg (arg1, COMPLEX_TYPE)
10101 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10102 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10105 CASE_FLT_FN (BUILT_IN_LDEXP):
10106 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10107 CASE_FLT_FN (BUILT_IN_SCALBN):
10108 CASE_FLT_FN (BUILT_IN_SCALBLN):
10109 return fold_builtin_load_exponent (loc, arg0, arg1,
10110 type, /*ldexp=*/false);
10112 CASE_FLT_FN (BUILT_IN_FREXP):
10113 return fold_builtin_frexp (loc, arg0, arg1, type);
10115 CASE_FLT_FN (BUILT_IN_MODF):
10116 return fold_builtin_modf (loc, arg0, arg1, type);
10118 case BUILT_IN_BZERO:
10119 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10121 case BUILT_IN_FPUTS:
10122 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10124 case BUILT_IN_FPUTS_UNLOCKED:
10125 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10127 case BUILT_IN_STRSTR:
10128 return fold_builtin_strstr (loc, arg0, arg1, type);
10130 case BUILT_IN_STRCAT:
10131 return fold_builtin_strcat (loc, arg0, arg1);
10133 case BUILT_IN_STRSPN:
10134 return fold_builtin_strspn (loc, arg0, arg1);
10136 case BUILT_IN_STRCSPN:
10137 return fold_builtin_strcspn (loc, arg0, arg1);
10139 case BUILT_IN_STRCHR:
10140 case BUILT_IN_INDEX:
10141 return fold_builtin_strchr (loc, arg0, arg1, type);
10143 case BUILT_IN_STRRCHR:
10144 case BUILT_IN_RINDEX:
10145 return fold_builtin_strrchr (loc, arg0, arg1, type);
10147 case BUILT_IN_STRCPY:
10148 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10150 case BUILT_IN_STPCPY:
10153 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10157 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10160 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10163 case BUILT_IN_STRCMP:
10164 return fold_builtin_strcmp (loc, arg0, arg1);
10166 case BUILT_IN_STRPBRK:
10167 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10169 case BUILT_IN_EXPECT:
10170 return fold_builtin_expect (loc, arg0, arg1);
10172 CASE_FLT_FN (BUILT_IN_POW):
10173 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10175 CASE_FLT_FN (BUILT_IN_POWI):
10176 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10178 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10179 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10181 CASE_FLT_FN (BUILT_IN_FMIN):
10182 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10184 CASE_FLT_FN (BUILT_IN_FMAX):
10185 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10187 case BUILT_IN_ISGREATER:
10188 return fold_builtin_unordered_cmp (loc, fndecl,
10189 arg0, arg1, UNLE_EXPR, LE_EXPR);
10190 case BUILT_IN_ISGREATEREQUAL:
10191 return fold_builtin_unordered_cmp (loc, fndecl,
10192 arg0, arg1, UNLT_EXPR, LT_EXPR);
10193 case BUILT_IN_ISLESS:
10194 return fold_builtin_unordered_cmp (loc, fndecl,
10195 arg0, arg1, UNGE_EXPR, GE_EXPR);
10196 case BUILT_IN_ISLESSEQUAL:
10197 return fold_builtin_unordered_cmp (loc, fndecl,
10198 arg0, arg1, UNGT_EXPR, GT_EXPR);
10199 case BUILT_IN_ISLESSGREATER:
10200 return fold_builtin_unordered_cmp (loc, fndecl,
10201 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10202 case BUILT_IN_ISUNORDERED:
10203 return fold_builtin_unordered_cmp (loc, fndecl,
10204 arg0, arg1, UNORDERED_EXPR,
10207 /* We do the folding for va_start in the expander. */
10208 case BUILT_IN_VA_START:
10211 case BUILT_IN_SPRINTF:
10212 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10214 case BUILT_IN_OBJECT_SIZE:
10215 return fold_builtin_object_size (arg0, arg1);
10217 case BUILT_IN_PRINTF:
10218 case BUILT_IN_PRINTF_UNLOCKED:
10219 case BUILT_IN_VPRINTF:
10220 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10222 case BUILT_IN_PRINTF_CHK:
10223 case BUILT_IN_VPRINTF_CHK:
10224 if (!validate_arg (arg0, INTEGER_TYPE)
10225 || TREE_SIDE_EFFECTS (arg0))
10228 return fold_builtin_printf (loc, fndecl,
10229 arg1, NULL_TREE, ignore, fcode);
10232 case BUILT_IN_FPRINTF:
10233 case BUILT_IN_FPRINTF_UNLOCKED:
10234 case BUILT_IN_VFPRINTF:
10235 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10244 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10245 and ARG2. IGNORE is true if the result of the function call is ignored.
10246 This function returns NULL_TREE if no simplification was possible. */
10249 fold_builtin_3 (location_t loc, tree fndecl,
10250 tree arg0, tree arg1, tree arg2, bool ignore)
10252 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10253 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10257 CASE_FLT_FN (BUILT_IN_SINCOS):
10258 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10260 CASE_FLT_FN (BUILT_IN_FMA):
10261 if (validate_arg (arg0, REAL_TYPE)
10262 && validate_arg(arg1, REAL_TYPE)
10263 && validate_arg(arg2, REAL_TYPE))
10264 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10267 CASE_FLT_FN (BUILT_IN_REMQUO):
10268 if (validate_arg (arg0, REAL_TYPE)
10269 && validate_arg(arg1, REAL_TYPE)
10270 && validate_arg(arg2, POINTER_TYPE))
10271 return do_mpfr_remquo (arg0, arg1, arg2);
10274 case BUILT_IN_MEMSET:
10275 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10277 case BUILT_IN_BCOPY:
10278 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10279 void_type_node, true, /*endp=*/3);
10281 case BUILT_IN_MEMCPY:
10282 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10283 type, ignore, /*endp=*/0);
10285 case BUILT_IN_MEMPCPY:
10286 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10287 type, ignore, /*endp=*/1);
10289 case BUILT_IN_MEMMOVE:
10290 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10291 type, ignore, /*endp=*/3);
10293 case BUILT_IN_STRNCAT:
10294 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10296 case BUILT_IN_STRNCPY:
10297 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10299 case BUILT_IN_STRNCMP:
10300 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10302 case BUILT_IN_MEMCHR:
10303 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10305 case BUILT_IN_BCMP:
10306 case BUILT_IN_MEMCMP:
10307 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10309 case BUILT_IN_SPRINTF:
10310 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10312 case BUILT_IN_STRCPY_CHK:
10313 case BUILT_IN_STPCPY_CHK:
10314 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10317 case BUILT_IN_STRCAT_CHK:
10318 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10320 case BUILT_IN_PRINTF_CHK:
10321 case BUILT_IN_VPRINTF_CHK:
10322 if (!validate_arg (arg0, INTEGER_TYPE)
10323 || TREE_SIDE_EFFECTS (arg0))
10326 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10329 case BUILT_IN_FPRINTF:
10330 case BUILT_IN_FPRINTF_UNLOCKED:
10331 case BUILT_IN_VFPRINTF:
10332 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10335 case BUILT_IN_FPRINTF_CHK:
10336 case BUILT_IN_VFPRINTF_CHK:
10337 if (!validate_arg (arg1, INTEGER_TYPE)
10338 || TREE_SIDE_EFFECTS (arg1))
10341 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10350 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10351 ARG2, and ARG3. IGNORE is true if the result of the function call is
10352 ignored. This function returns NULL_TREE if no simplification was
10356 fold_builtin_4 (location_t loc, tree fndecl,
10357 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10359 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10363 case BUILT_IN_MEMCPY_CHK:
10364 case BUILT_IN_MEMPCPY_CHK:
10365 case BUILT_IN_MEMMOVE_CHK:
10366 case BUILT_IN_MEMSET_CHK:
10367 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10369 DECL_FUNCTION_CODE (fndecl));
10371 case BUILT_IN_STRNCPY_CHK:
10372 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10374 case BUILT_IN_STRNCAT_CHK:
10375 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10377 case BUILT_IN_FPRINTF_CHK:
10378 case BUILT_IN_VFPRINTF_CHK:
10379 if (!validate_arg (arg1, INTEGER_TYPE)
10380 || TREE_SIDE_EFFECTS (arg1))
10383 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10393 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10394 arguments, where NARGS <= 4. IGNORE is true if the result of the
10395 function call is ignored. This function returns NULL_TREE if no
10396 simplification was possible. Note that this only folds builtins with
10397 fixed argument patterns. Foldings that do varargs-to-varargs
10398 transformations, or that match calls with more than 4 arguments,
10399 need to be handled with fold_builtin_varargs instead. */
10401 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10404 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10406 tree ret = NULL_TREE;
10411 ret = fold_builtin_0 (loc, fndecl, ignore);
10414 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10417 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10420 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10423 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10431 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10432 SET_EXPR_LOCATION (ret, loc);
10433 TREE_NO_WARNING (ret) = 1;
10439 /* Builtins with folding operations that operate on "..." arguments
10440 need special handling; we need to store the arguments in a convenient
10441 data structure before attempting any folding. Fortunately there are
10442 only a few builtins that fall into this category. FNDECL is the
10443 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10444 result of the function call is ignored. */
10447 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10448 bool ignore ATTRIBUTE_UNUSED)
10450 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10451 tree ret = NULL_TREE;
10455 case BUILT_IN_SPRINTF_CHK:
10456 case BUILT_IN_VSPRINTF_CHK:
10457 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10460 case BUILT_IN_SNPRINTF_CHK:
10461 case BUILT_IN_VSNPRINTF_CHK:
10462 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10465 case BUILT_IN_FPCLASSIFY:
10466 ret = fold_builtin_fpclassify (loc, exp);
10474 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10475 SET_EXPR_LOCATION (ret, loc);
10476 TREE_NO_WARNING (ret) = 1;
10482 /* Return true if FNDECL shouldn't be folded right now.
10483 If a built-in function has an inline attribute always_inline
10484 wrapper, defer folding it after always_inline functions have
10485 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10486 might not be performed. */
10489 avoid_folding_inline_builtin (tree fndecl)
10491 return (DECL_DECLARED_INLINE_P (fndecl)
10492 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10494 && !cfun->always_inline_functions_inlined
10495 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10498 /* A wrapper function for builtin folding that prevents warnings for
10499 "statement without effect" and the like, caused by removing the
10500 call node earlier than the warning is generated. */
10503 fold_call_expr (location_t loc, tree exp, bool ignore)
10505 tree ret = NULL_TREE;
10506 tree fndecl = get_callee_fndecl (exp);
10508 && TREE_CODE (fndecl) == FUNCTION_DECL
10509 && DECL_BUILT_IN (fndecl)
10510 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10511 yet. Defer folding until we see all the arguments
10512 (after inlining). */
10513 && !CALL_EXPR_VA_ARG_PACK (exp))
10515 int nargs = call_expr_nargs (exp);
10517 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10518 instead last argument is __builtin_va_arg_pack (). Defer folding
10519 even in that case, until arguments are finalized. */
10520 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10522 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10524 && TREE_CODE (fndecl2) == FUNCTION_DECL
10525 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10526 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10530 if (avoid_folding_inline_builtin (fndecl))
10533 /* FIXME: Don't use a list in this interface. */
10534 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10535 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10538 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10540 tree *args = CALL_EXPR_ARGP (exp);
10541 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10544 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10552 /* Conveniently construct a function call expression. FNDECL names the
10553 function to be called and ARGLIST is a TREE_LIST of arguments. */
10556 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10558 tree fntype = TREE_TYPE (fndecl);
10559 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10560 int n = list_length (arglist);
10561 tree *argarray = (tree *) alloca (n * sizeof (tree));
10564 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10565 argarray[i] = TREE_VALUE (arglist);
10566 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10569 /* Conveniently construct a function call expression. FNDECL names the
10570 function to be called, N is the number of arguments, and the "..."
10571 parameters are the argument expressions. */
10574 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10577 tree fntype = TREE_TYPE (fndecl);
10578 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10579 tree *argarray = (tree *) alloca (n * sizeof (tree));
10583 for (i = 0; i < n; i++)
10584 argarray[i] = va_arg (ap, tree);
10586 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10589 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10590 N arguments are passed in the array ARGARRAY. */
10593 fold_builtin_call_array (location_t loc, tree type,
10598 tree ret = NULL_TREE;
10602 if (TREE_CODE (fn) == ADDR_EXPR)
10604 tree fndecl = TREE_OPERAND (fn, 0);
10605 if (TREE_CODE (fndecl) == FUNCTION_DECL
10606 && DECL_BUILT_IN (fndecl))
10608 /* If last argument is __builtin_va_arg_pack (), arguments to this
10609 function are not finalized yet. Defer folding until they are. */
10610 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10612 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10614 && TREE_CODE (fndecl2) == FUNCTION_DECL
10615 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10616 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10617 return build_call_array_loc (loc, type, fn, n, argarray);
10619 if (avoid_folding_inline_builtin (fndecl))
10620 return build_call_array_loc (loc, type, fn, n, argarray);
10621 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10623 tree arglist = NULL_TREE;
10624 for (i = n - 1; i >= 0; i--)
10625 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10626 ret = targetm.fold_builtin (fndecl, arglist, false);
10629 return build_call_array_loc (loc, type, fn, n, argarray);
10631 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10633 /* First try the transformations that don't require consing up
10635 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10640 /* If we got this far, we need to build an exp. */
10641 exp = build_call_array_loc (loc, type, fn, n, argarray);
10642 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10643 return ret ? ret : exp;
10647 return build_call_array_loc (loc, type, fn, n, argarray);
10650 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10651 along with N new arguments specified as the "..." parameters. SKIP
10652 is the number of arguments in EXP to be omitted. This function is used
10653 to do varargs-to-varargs transformations. */
10656 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10658 int oldnargs = call_expr_nargs (exp);
10659 int nargs = oldnargs - skip + n;
10660 tree fntype = TREE_TYPE (fndecl);
10661 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10669 buffer = XALLOCAVEC (tree, nargs);
10671 for (i = 0; i < n; i++)
10672 buffer[i] = va_arg (ap, tree);
10674 for (j = skip; j < oldnargs; j++, i++)
10675 buffer[i] = CALL_EXPR_ARG (exp, j);
10678 buffer = CALL_EXPR_ARGP (exp) + skip;
10680 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
10683 /* Validate a single argument ARG against a tree code CODE representing
10687 validate_arg (const_tree arg, enum tree_code code)
10691 else if (code == POINTER_TYPE)
10692 return POINTER_TYPE_P (TREE_TYPE (arg));
10693 else if (code == INTEGER_TYPE)
10694 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10695 return code == TREE_CODE (TREE_TYPE (arg));
10698 /* This function validates the types of a function call argument list
10699 against a specified list of tree_codes. If the last specifier is a 0,
10700 that represents an ellipses, otherwise the last specifier must be a
10703 This is the GIMPLE version of validate_arglist. Eventually we want to
10704 completely convert builtins.c to work from GIMPLEs and the tree based
10705 validate_arglist will then be removed. */
10708 validate_gimple_arglist (const_gimple call, ...)
10710 enum tree_code code;
10716 va_start (ap, call);
10721 code = (enum tree_code) va_arg (ap, int);
10725 /* This signifies an ellipses, any further arguments are all ok. */
10729 /* This signifies an endlink, if no arguments remain, return
10730 true, otherwise return false. */
10731 res = (i == gimple_call_num_args (call));
10734 /* If no parameters remain or the parameter's code does not
10735 match the specified code, return false. Otherwise continue
10736 checking any remaining arguments. */
10737 arg = gimple_call_arg (call, i++);
10738 if (!validate_arg (arg, code))
10745 /* We need gotos here since we can only have one VA_CLOSE in a
10753 /* This function validates the types of a function call argument list
10754 against a specified list of tree_codes. If the last specifier is a 0,
10755 that represents an ellipses, otherwise the last specifier must be a
10759 validate_arglist (const_tree callexpr, ...)
10761 enum tree_code code;
10764 const_call_expr_arg_iterator iter;
10767 va_start (ap, callexpr);
10768 init_const_call_expr_arg_iterator (callexpr, &iter);
10772 code = (enum tree_code) va_arg (ap, int);
10776 /* This signifies an ellipses, any further arguments are all ok. */
10780 /* This signifies an endlink, if no arguments remain, return
10781 true, otherwise return false. */
10782 res = !more_const_call_expr_args_p (&iter);
10785 /* If no parameters remain or the parameter's code does not
10786 match the specified code, return false. Otherwise continue
10787 checking any remaining arguments. */
10788 arg = next_const_call_expr_arg (&iter);
10789 if (!validate_arg (arg, code))
10796 /* We need gotos here since we can only have one VA_CLOSE in a
10804 /* Default target-specific builtin expander that does nothing. */
10807 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10808 rtx target ATTRIBUTE_UNUSED,
10809 rtx subtarget ATTRIBUTE_UNUSED,
10810 enum machine_mode mode ATTRIBUTE_UNUSED,
10811 int ignore ATTRIBUTE_UNUSED)
10816 /* Returns true is EXP represents data that would potentially reside
10817 in a readonly section. */
10820 readonly_data_expr (tree exp)
10824 if (TREE_CODE (exp) != ADDR_EXPR)
10827 exp = get_base_address (TREE_OPERAND (exp, 0));
10831 /* Make sure we call decl_readonly_section only for trees it
10832 can handle (since it returns true for everything it doesn't
10834 if (TREE_CODE (exp) == STRING_CST
10835 || TREE_CODE (exp) == CONSTRUCTOR
10836 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10837 return decl_readonly_section (exp, 0);
10842 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10843 to the call, and TYPE is its return type.
10845 Return NULL_TREE if no simplification was possible, otherwise return the
10846 simplified form of the call as a tree.
10848 The simplified form may be a constant or other expression which
10849 computes the same value, but in a more efficient manner (including
10850 calls to other builtin functions).
10852 The call may contain arguments which need to be evaluated, but
10853 which are not useful to determine the result of the call. In
10854 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10855 COMPOUND_EXPR will be an argument which must be evaluated.
10856 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10857 COMPOUND_EXPR in the chain will contain the tree for the simplified
10858 form of the builtin function call. */
10861 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10863 if (!validate_arg (s1, POINTER_TYPE)
10864 || !validate_arg (s2, POINTER_TYPE))
10869 const char *p1, *p2;
10871 p2 = c_getstr (s2);
10875 p1 = c_getstr (s1);
10878 const char *r = strstr (p1, p2);
10882 return build_int_cst (TREE_TYPE (s1), 0);
10884 /* Return an offset into the constant string argument. */
10885 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10886 s1, size_int (r - p1));
10887 return fold_convert_loc (loc, type, tem);
10890 /* The argument is const char *, and the result is char *, so we need
10891 a type conversion here to avoid a warning. */
10893 return fold_convert_loc (loc, type, s1);
10898 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10902 /* New argument list transforming strstr(s1, s2) to
10903 strchr(s1, s2[0]). */
10904 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10908 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10909 the call, and TYPE is its return type.
10911 Return NULL_TREE if no simplification was possible, otherwise return the
10912 simplified form of the call as a tree.
10914 The simplified form may be a constant or other expression which
10915 computes the same value, but in a more efficient manner (including
10916 calls to other builtin functions).
10918 The call may contain arguments which need to be evaluated, but
10919 which are not useful to determine the result of the call. In
10920 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10921 COMPOUND_EXPR will be an argument which must be evaluated.
10922 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10923 COMPOUND_EXPR in the chain will contain the tree for the simplified
10924 form of the builtin function call. */
10927 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10929 if (!validate_arg (s1, POINTER_TYPE)
10930 || !validate_arg (s2, INTEGER_TYPE))
10936 if (TREE_CODE (s2) != INTEGER_CST)
10939 p1 = c_getstr (s1);
10946 if (target_char_cast (s2, &c))
10949 r = strchr (p1, c);
10952 return build_int_cst (TREE_TYPE (s1), 0);
10954 /* Return an offset into the constant string argument. */
10955 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10956 s1, size_int (r - p1));
10957 return fold_convert_loc (loc, type, tem);
10963 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10964 the call, and TYPE is its return type.
10966 Return NULL_TREE if no simplification was possible, otherwise return the
10967 simplified form of the call as a tree.
10969 The simplified form may be a constant or other expression which
10970 computes the same value, but in a more efficient manner (including
10971 calls to other builtin functions).
10973 The call may contain arguments which need to be evaluated, but
10974 which are not useful to determine the result of the call. In
10975 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10976 COMPOUND_EXPR will be an argument which must be evaluated.
10977 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10978 COMPOUND_EXPR in the chain will contain the tree for the simplified
10979 form of the builtin function call. */
10982 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10984 if (!validate_arg (s1, POINTER_TYPE)
10985 || !validate_arg (s2, INTEGER_TYPE))
10992 if (TREE_CODE (s2) != INTEGER_CST)
10995 p1 = c_getstr (s1);
11002 if (target_char_cast (s2, &c))
11005 r = strrchr (p1, c);
11008 return build_int_cst (TREE_TYPE (s1), 0);
11010 /* Return an offset into the constant string argument. */
11011 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11012 s1, size_int (r - p1));
11013 return fold_convert_loc (loc, type, tem);
11016 if (! integer_zerop (s2))
11019 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11023 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11024 return build_call_expr_loc (loc, fn, 2, s1, s2);
11028 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11029 to the call, and TYPE is its return type.
11031 Return NULL_TREE if no simplification was possible, otherwise return the
11032 simplified form of the call as a tree.
11034 The simplified form may be a constant or other expression which
11035 computes the same value, but in a more efficient manner (including
11036 calls to other builtin functions).
11038 The call may contain arguments which need to be evaluated, but
11039 which are not useful to determine the result of the call. In
11040 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11041 COMPOUND_EXPR will be an argument which must be evaluated.
11042 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11043 COMPOUND_EXPR in the chain will contain the tree for the simplified
11044 form of the builtin function call. */
11047 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11049 if (!validate_arg (s1, POINTER_TYPE)
11050 || !validate_arg (s2, POINTER_TYPE))
11055 const char *p1, *p2;
11057 p2 = c_getstr (s2);
11061 p1 = c_getstr (s1);
11064 const char *r = strpbrk (p1, p2);
11068 return build_int_cst (TREE_TYPE (s1), 0);
11070 /* Return an offset into the constant string argument. */
11071 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11072 s1, size_int (r - p1));
11073 return fold_convert_loc (loc, type, tem);
11077 /* strpbrk(x, "") == NULL.
11078 Evaluate and ignore s1 in case it had side-effects. */
11079 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11082 return NULL_TREE; /* Really call strpbrk. */
11084 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11088 /* New argument list transforming strpbrk(s1, s2) to
11089 strchr(s1, s2[0]). */
11090 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11094 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11097 Return NULL_TREE if no simplification was possible, otherwise return the
11098 simplified form of the call as a tree.
11100 The simplified form may be a constant or other expression which
11101 computes the same value, but in a more efficient manner (including
11102 calls to other builtin functions).
11104 The call may contain arguments which need to be evaluated, but
11105 which are not useful to determine the result of the call. In
11106 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11107 COMPOUND_EXPR will be an argument which must be evaluated.
11108 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11109 COMPOUND_EXPR in the chain will contain the tree for the simplified
11110 form of the builtin function call. */
11113 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11115 if (!validate_arg (dst, POINTER_TYPE)
11116 || !validate_arg (src, POINTER_TYPE))
11120 const char *p = c_getstr (src);
11122 /* If the string length is zero, return the dst parameter. */
11123 if (p && *p == '\0')
11126 if (optimize_insn_for_speed_p ())
11128 /* See if we can store by pieces into (dst + strlen(dst)). */
11130 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11131 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11133 if (!strlen_fn || !strcpy_fn)
11136 /* If we don't have a movstr we don't want to emit an strcpy
11137 call. We have to do that if the length of the source string
11138 isn't computable (in that case we can use memcpy probably
11139 later expanding to a sequence of mov instructions). If we
11140 have movstr instructions we can emit strcpy calls. */
11143 tree len = c_strlen (src, 1);
11144 if (! len || TREE_SIDE_EFFECTS (len))
11148 /* Stabilize the argument list. */
11149 dst = builtin_save_expr (dst);
11151 /* Create strlen (dst). */
11152 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11153 /* Create (dst p+ strlen (dst)). */
11155 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11156 TREE_TYPE (dst), dst, newdst);
11157 newdst = builtin_save_expr (newdst);
11159 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11160 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11166 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11167 arguments to the call.
11169 Return NULL_TREE if no simplification was possible, otherwise return the
11170 simplified form of the call as a tree.
11172 The simplified form may be a constant or other expression which
11173 computes the same value, but in a more efficient manner (including
11174 calls to other builtin functions).
11176 The call may contain arguments which need to be evaluated, but
11177 which are not useful to determine the result of the call. In
11178 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11179 COMPOUND_EXPR will be an argument which must be evaluated.
11180 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11181 COMPOUND_EXPR in the chain will contain the tree for the simplified
11182 form of the builtin function call. */
11185 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11187 if (!validate_arg (dst, POINTER_TYPE)
11188 || !validate_arg (src, POINTER_TYPE)
11189 || !validate_arg (len, INTEGER_TYPE))
11193 const char *p = c_getstr (src);
11195 /* If the requested length is zero, or the src parameter string
11196 length is zero, return the dst parameter. */
11197 if (integer_zerop (len) || (p && *p == '\0'))
11198 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11200 /* If the requested len is greater than or equal to the string
11201 length, call strcat. */
11202 if (TREE_CODE (len) == INTEGER_CST && p
11203 && compare_tree_int (len, strlen (p)) >= 0)
11205 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11207 /* If the replacement _DECL isn't initialized, don't do the
11212 return build_call_expr_loc (loc, fn, 2, dst, src);
11218 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11221 Return NULL_TREE if no simplification was possible, otherwise return the
11222 simplified form of the call as a tree.
11224 The simplified form may be a constant or other expression which
11225 computes the same value, but in a more efficient manner (including
11226 calls to other builtin functions).
11228 The call may contain arguments which need to be evaluated, but
11229 which are not useful to determine the result of the call. In
11230 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11231 COMPOUND_EXPR will be an argument which must be evaluated.
11232 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11233 COMPOUND_EXPR in the chain will contain the tree for the simplified
11234 form of the builtin function call. */
11237 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11239 if (!validate_arg (s1, POINTER_TYPE)
11240 || !validate_arg (s2, POINTER_TYPE))
11244 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11246 /* If both arguments are constants, evaluate at compile-time. */
11249 const size_t r = strspn (p1, p2);
11250 return size_int (r);
11253 /* If either argument is "", return NULL_TREE. */
11254 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11255 /* Evaluate and ignore both arguments in case either one has
11257 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11263 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11266 Return NULL_TREE if no simplification was possible, otherwise return the
11267 simplified form of the call as a tree.
11269 The simplified form may be a constant or other expression which
11270 computes the same value, but in a more efficient manner (including
11271 calls to other builtin functions).
11273 The call may contain arguments which need to be evaluated, but
11274 which are not useful to determine the result of the call. In
11275 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11276 COMPOUND_EXPR will be an argument which must be evaluated.
11277 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11278 COMPOUND_EXPR in the chain will contain the tree for the simplified
11279 form of the builtin function call. */
11282 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11284 if (!validate_arg (s1, POINTER_TYPE)
11285 || !validate_arg (s2, POINTER_TYPE))
11289 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11291 /* If both arguments are constants, evaluate at compile-time. */
11294 const size_t r = strcspn (p1, p2);
11295 return size_int (r);
11298 /* If the first argument is "", return NULL_TREE. */
11299 if (p1 && *p1 == '\0')
11301 /* Evaluate and ignore argument s2 in case it has
11303 return omit_one_operand_loc (loc, size_type_node,
11304 size_zero_node, s2);
11307 /* If the second argument is "", return __builtin_strlen(s1). */
11308 if (p2 && *p2 == '\0')
11310 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11312 /* If the replacement _DECL isn't initialized, don't do the
11317 return build_call_expr_loc (loc, fn, 1, s1);
11323 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11324 to the call. IGNORE is true if the value returned
11325 by the builtin will be ignored. UNLOCKED is true is true if this
11326 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11327 the known length of the string. Return NULL_TREE if no simplification
11331 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11332 bool ignore, bool unlocked, tree len)
11334 /* If we're using an unlocked function, assume the other unlocked
11335 functions exist explicitly. */
11336 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11337 : implicit_built_in_decls[BUILT_IN_FPUTC];
11338 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11339 : implicit_built_in_decls[BUILT_IN_FWRITE];
11341 /* If the return value is used, don't do the transformation. */
11345 /* Verify the arguments in the original call. */
11346 if (!validate_arg (arg0, POINTER_TYPE)
11347 || !validate_arg (arg1, POINTER_TYPE))
11351 len = c_strlen (arg0, 0);
11353 /* Get the length of the string passed to fputs. If the length
11354 can't be determined, punt. */
11356 || TREE_CODE (len) != INTEGER_CST)
11359 switch (compare_tree_int (len, 1))
11361 case -1: /* length is 0, delete the call entirely . */
11362 return omit_one_operand_loc (loc, integer_type_node,
11363 integer_zero_node, arg1);;
11365 case 0: /* length is 1, call fputc. */
11367 const char *p = c_getstr (arg0);
11372 return build_call_expr_loc (loc, fn_fputc, 2,
11373 build_int_cst (NULL_TREE, p[0]), arg1);
11379 case 1: /* length is greater than 1, call fwrite. */
11381 /* If optimizing for size keep fputs. */
11382 if (optimize_function_for_size_p (cfun))
11384 /* New argument list transforming fputs(string, stream) to
11385 fwrite(string, 1, len, stream). */
11387 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11388 size_one_node, len, arg1);
11393 gcc_unreachable ();
11398 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11399 produced. False otherwise. This is done so that we don't output the error
11400 or warning twice or three times. */
11403 fold_builtin_next_arg (tree exp, bool va_start_p)
11405 tree fntype = TREE_TYPE (current_function_decl);
11406 int nargs = call_expr_nargs (exp);
11409 if (TYPE_ARG_TYPES (fntype) == 0
11410 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11411 == void_type_node))
11413 error ("%<va_start%> used in function with fixed args");
11419 if (va_start_p && (nargs != 2))
11421 error ("wrong number of arguments to function %<va_start%>");
11424 arg = CALL_EXPR_ARG (exp, 1);
11426 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11427 when we checked the arguments and if needed issued a warning. */
11432 /* Evidently an out of date version of <stdarg.h>; can't validate
11433 va_start's second argument, but can still work as intended. */
11434 warning (0, "%<__builtin_next_arg%> called without an argument");
11437 else if (nargs > 1)
11439 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11442 arg = CALL_EXPR_ARG (exp, 0);
11445 if (TREE_CODE (arg) == SSA_NAME)
11446 arg = SSA_NAME_VAR (arg);
11448 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11449 or __builtin_next_arg (0) the first time we see it, after checking
11450 the arguments and if needed issuing a warning. */
11451 if (!integer_zerop (arg))
11453 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11455 /* Strip off all nops for the sake of the comparison. This
11456 is not quite the same as STRIP_NOPS. It does more.
11457 We must also strip off INDIRECT_EXPR for C++ reference
11459 while (CONVERT_EXPR_P (arg)
11460 || TREE_CODE (arg) == INDIRECT_REF)
11461 arg = TREE_OPERAND (arg, 0);
11462 if (arg != last_parm)
11464 /* FIXME: Sometimes with the tree optimizers we can get the
11465 not the last argument even though the user used the last
11466 argument. We just warn and set the arg to be the last
11467 argument so that we will get wrong-code because of
11469 warning (0, "second parameter of %<va_start%> not last named argument");
11472 /* Undefined by C99 7.15.1.4p4 (va_start):
11473 "If the parameter parmN is declared with the register storage
11474 class, with a function or array type, or with a type that is
11475 not compatible with the type that results after application of
11476 the default argument promotions, the behavior is undefined."
11478 else if (DECL_REGISTER (arg))
11479 warning (0, "undefined behaviour when second parameter of "
11480 "%<va_start%> is declared with %<register%> storage");
11482 /* We want to verify the second parameter just once before the tree
11483 optimizers are run and then avoid keeping it in the tree,
11484 as otherwise we could warn even for correct code like:
11485 void foo (int i, ...)
11486 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11488 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11490 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11496 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11497 ORIG may be null if this is a 2-argument call. We don't attempt to
11498 simplify calls with more than 3 arguments.
11500 Return NULL_TREE if no simplification was possible, otherwise return the
11501 simplified form of the call as a tree. If IGNORED is true, it means that
11502 the caller does not use the returned value of the function. */
11505 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11506 tree orig, int ignored)
11509 const char *fmt_str = NULL;
11511 /* Verify the required arguments in the original call. We deal with two
11512 types of sprintf() calls: 'sprintf (str, fmt)' and
11513 'sprintf (dest, "%s", orig)'. */
11514 if (!validate_arg (dest, POINTER_TYPE)
11515 || !validate_arg (fmt, POINTER_TYPE))
11517 if (orig && !validate_arg (orig, POINTER_TYPE))
11520 /* Check whether the format is a literal string constant. */
11521 fmt_str = c_getstr (fmt);
11522 if (fmt_str == NULL)
11526 retval = NULL_TREE;
11528 if (!init_target_chars ())
11531 /* If the format doesn't contain % args or %%, use strcpy. */
11532 if (strchr (fmt_str, target_percent) == NULL)
11534 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11539 /* Don't optimize sprintf (buf, "abc", ptr++). */
11543 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11544 'format' is known to contain no % formats. */
11545 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11547 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11550 /* If the format is "%s", use strcpy if the result isn't used. */
11551 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11554 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11559 /* Don't crash on sprintf (str1, "%s"). */
11563 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11566 retval = c_strlen (orig, 1);
11567 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11570 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11573 if (call && retval)
11575 retval = fold_convert_loc
11576 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11578 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11584 /* Expand a call EXP to __builtin_object_size. */
11587 expand_builtin_object_size (tree exp)
11590 int object_size_type;
11591 tree fndecl = get_callee_fndecl (exp);
11593 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11595 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11597 expand_builtin_trap ();
11601 ost = CALL_EXPR_ARG (exp, 1);
11604 if (TREE_CODE (ost) != INTEGER_CST
11605 || tree_int_cst_sgn (ost) < 0
11606 || compare_tree_int (ost, 3) > 0)
11608 error ("%Klast argument of %D is not integer constant between 0 and 3",
11610 expand_builtin_trap ();
11614 object_size_type = tree_low_cst (ost, 0);
11616 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11619 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11620 FCODE is the BUILT_IN_* to use.
11621 Return NULL_RTX if we failed; the caller should emit a normal call,
11622 otherwise try to get the result in TARGET, if convenient (and in
11623 mode MODE if that's convenient). */
11626 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11627 enum built_in_function fcode)
11629 tree dest, src, len, size;
11631 if (!validate_arglist (exp,
11633 fcode == BUILT_IN_MEMSET_CHK
11634 ? INTEGER_TYPE : POINTER_TYPE,
11635 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11638 dest = CALL_EXPR_ARG (exp, 0);
11639 src = CALL_EXPR_ARG (exp, 1);
11640 len = CALL_EXPR_ARG (exp, 2);
11641 size = CALL_EXPR_ARG (exp, 3);
11643 if (! host_integerp (size, 1))
11646 if (host_integerp (len, 1) || integer_all_onesp (size))
11650 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11652 warning_at (tree_nonartificial_location (exp),
11653 0, "%Kcall to %D will always overflow destination buffer",
11654 exp, get_callee_fndecl (exp));
11659 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11660 mem{cpy,pcpy,move,set} is available. */
11663 case BUILT_IN_MEMCPY_CHK:
11664 fn = built_in_decls[BUILT_IN_MEMCPY];
11666 case BUILT_IN_MEMPCPY_CHK:
11667 fn = built_in_decls[BUILT_IN_MEMPCPY];
11669 case BUILT_IN_MEMMOVE_CHK:
11670 fn = built_in_decls[BUILT_IN_MEMMOVE];
11672 case BUILT_IN_MEMSET_CHK:
11673 fn = built_in_decls[BUILT_IN_MEMSET];
11682 fn = build_call_nofold (fn, 3, dest, src, len);
11683 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11684 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11685 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11687 else if (fcode == BUILT_IN_MEMSET_CHK)
11691 unsigned int dest_align
11692 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11694 /* If DEST is not a pointer type, call the normal function. */
11695 if (dest_align == 0)
11698 /* If SRC and DEST are the same (and not volatile), do nothing. */
11699 if (operand_equal_p (src, dest, 0))
11703 if (fcode != BUILT_IN_MEMPCPY_CHK)
11705 /* Evaluate and ignore LEN in case it has side-effects. */
11706 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11707 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11710 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11711 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11714 /* __memmove_chk special case. */
11715 if (fcode == BUILT_IN_MEMMOVE_CHK)
11717 unsigned int src_align
11718 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11720 if (src_align == 0)
11723 /* If src is categorized for a readonly section we can use
11724 normal __memcpy_chk. */
11725 if (readonly_data_expr (src))
11727 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11730 fn = build_call_nofold (fn, 4, dest, src, len, size);
11731 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11732 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11733 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11740 /* Emit warning if a buffer overflow is detected at compile time. */
11743 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11747 location_t loc = tree_nonartificial_location (exp);
11751 case BUILT_IN_STRCPY_CHK:
11752 case BUILT_IN_STPCPY_CHK:
11753 /* For __strcat_chk the warning will be emitted only if overflowing
11754 by at least strlen (dest) + 1 bytes. */
11755 case BUILT_IN_STRCAT_CHK:
11756 len = CALL_EXPR_ARG (exp, 1);
11757 size = CALL_EXPR_ARG (exp, 2);
11760 case BUILT_IN_STRNCAT_CHK:
11761 case BUILT_IN_STRNCPY_CHK:
11762 len = CALL_EXPR_ARG (exp, 2);
11763 size = CALL_EXPR_ARG (exp, 3);
11765 case BUILT_IN_SNPRINTF_CHK:
11766 case BUILT_IN_VSNPRINTF_CHK:
11767 len = CALL_EXPR_ARG (exp, 1);
11768 size = CALL_EXPR_ARG (exp, 3);
11771 gcc_unreachable ();
11777 if (! host_integerp (size, 1) || integer_all_onesp (size))
11782 len = c_strlen (len, 1);
11783 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11786 else if (fcode == BUILT_IN_STRNCAT_CHK)
11788 tree src = CALL_EXPR_ARG (exp, 1);
11789 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11791 src = c_strlen (src, 1);
11792 if (! src || ! host_integerp (src, 1))
11794 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11795 exp, get_callee_fndecl (exp));
11798 else if (tree_int_cst_lt (src, size))
11801 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11804 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11805 exp, get_callee_fndecl (exp));
11808 /* Emit warning if a buffer overflow is detected at compile time
11809 in __sprintf_chk/__vsprintf_chk calls. */
11812 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11814 tree size, len, fmt;
11815 const char *fmt_str;
11816 int nargs = call_expr_nargs (exp);
11818 /* Verify the required arguments in the original call. */
11822 size = CALL_EXPR_ARG (exp, 2);
11823 fmt = CALL_EXPR_ARG (exp, 3);
11825 if (! host_integerp (size, 1) || integer_all_onesp (size))
11828 /* Check whether the format is a literal string constant. */
11829 fmt_str = c_getstr (fmt);
11830 if (fmt_str == NULL)
11833 if (!init_target_chars ())
11836 /* If the format doesn't contain % args or %%, we know its size. */
11837 if (strchr (fmt_str, target_percent) == 0)
11838 len = build_int_cstu (size_type_node, strlen (fmt_str));
11839 /* If the format is "%s" and first ... argument is a string literal,
11841 else if (fcode == BUILT_IN_SPRINTF_CHK
11842 && strcmp (fmt_str, target_percent_s) == 0)
11848 arg = CALL_EXPR_ARG (exp, 4);
11849 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11852 len = c_strlen (arg, 1);
11853 if (!len || ! host_integerp (len, 1))
11859 if (! tree_int_cst_lt (len, size))
11860 warning_at (tree_nonartificial_location (exp),
11861 0, "%Kcall to %D will always overflow destination buffer",
11862 exp, get_callee_fndecl (exp));
11865 /* Emit warning if a free is called with address of a variable. */
11868 maybe_emit_free_warning (tree exp)
11870 tree arg = CALL_EXPR_ARG (exp, 0);
11873 if (TREE_CODE (arg) != ADDR_EXPR)
11876 arg = get_base_address (TREE_OPERAND (arg, 0));
11877 if (arg == NULL || INDIRECT_REF_P (arg))
11880 if (SSA_VAR_P (arg))
11881 warning_at (tree_nonartificial_location (exp),
11882 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11884 warning_at (tree_nonartificial_location (exp),
11885 0, "%Kattempt to free a non-heap object", exp);
11888 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11892 fold_builtin_object_size (tree ptr, tree ost)
11894 tree ret = NULL_TREE;
11895 int object_size_type;
11897 if (!validate_arg (ptr, POINTER_TYPE)
11898 || !validate_arg (ost, INTEGER_TYPE))
11903 if (TREE_CODE (ost) != INTEGER_CST
11904 || tree_int_cst_sgn (ost) < 0
11905 || compare_tree_int (ost, 3) > 0)
11908 object_size_type = tree_low_cst (ost, 0);
11910 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11911 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11912 and (size_t) 0 for types 2 and 3. */
11913 if (TREE_SIDE_EFFECTS (ptr))
11914 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11916 if (TREE_CODE (ptr) == ADDR_EXPR)
11917 ret = build_int_cstu (size_type_node,
11918 compute_builtin_object_size (ptr, object_size_type));
11920 else if (TREE_CODE (ptr) == SSA_NAME)
11922 unsigned HOST_WIDE_INT bytes;
11924 /* If object size is not known yet, delay folding until
11925 later. Maybe subsequent passes will help determining
11927 bytes = compute_builtin_object_size (ptr, object_size_type);
11928 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11930 ret = build_int_cstu (size_type_node, bytes);
11935 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11936 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11937 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11944 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11945 DEST, SRC, LEN, and SIZE are the arguments to the call.
11946 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11947 code of the builtin. If MAXLEN is not NULL, it is maximum length
11948 passed as third argument. */
11951 fold_builtin_memory_chk (location_t loc, tree fndecl,
11952 tree dest, tree src, tree len, tree size,
11953 tree maxlen, bool ignore,
11954 enum built_in_function fcode)
11958 if (!validate_arg (dest, POINTER_TYPE)
11959 || !validate_arg (src,
11960 (fcode == BUILT_IN_MEMSET_CHK
11961 ? INTEGER_TYPE : POINTER_TYPE))
11962 || !validate_arg (len, INTEGER_TYPE)
11963 || !validate_arg (size, INTEGER_TYPE))
11966 /* If SRC and DEST are the same (and not volatile), return DEST
11967 (resp. DEST+LEN for __mempcpy_chk). */
11968 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11970 if (fcode != BUILT_IN_MEMPCPY_CHK)
11971 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
11975 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
11977 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
11981 if (! host_integerp (size, 1))
11984 if (! integer_all_onesp (size))
11986 if (! host_integerp (len, 1))
11988 /* If LEN is not constant, try MAXLEN too.
11989 For MAXLEN only allow optimizing into non-_ocs function
11990 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11991 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11993 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11995 /* (void) __mempcpy_chk () can be optimized into
11996 (void) __memcpy_chk (). */
11997 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12001 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12009 if (tree_int_cst_lt (size, maxlen))
12014 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12015 mem{cpy,pcpy,move,set} is available. */
12018 case BUILT_IN_MEMCPY_CHK:
12019 fn = built_in_decls[BUILT_IN_MEMCPY];
12021 case BUILT_IN_MEMPCPY_CHK:
12022 fn = built_in_decls[BUILT_IN_MEMPCPY];
12024 case BUILT_IN_MEMMOVE_CHK:
12025 fn = built_in_decls[BUILT_IN_MEMMOVE];
12027 case BUILT_IN_MEMSET_CHK:
12028 fn = built_in_decls[BUILT_IN_MEMSET];
12037 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12040 /* Fold a call to the __st[rp]cpy_chk builtin.
12041 DEST, SRC, and SIZE are the arguments to the call.
12042 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12043 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12044 strings passed as second argument. */
12047 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12048 tree src, tree size,
12049 tree maxlen, bool ignore,
12050 enum built_in_function fcode)
12054 if (!validate_arg (dest, POINTER_TYPE)
12055 || !validate_arg (src, POINTER_TYPE)
12056 || !validate_arg (size, INTEGER_TYPE))
12059 /* If SRC and DEST are the same (and not volatile), return DEST. */
12060 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12061 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12063 if (! host_integerp (size, 1))
12066 if (! integer_all_onesp (size))
12068 len = c_strlen (src, 1);
12069 if (! len || ! host_integerp (len, 1))
12071 /* If LEN is not constant, try MAXLEN too.
12072 For MAXLEN only allow optimizing into non-_ocs function
12073 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12074 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12076 if (fcode == BUILT_IN_STPCPY_CHK)
12081 /* If return value of __stpcpy_chk is ignored,
12082 optimize into __strcpy_chk. */
12083 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12087 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12090 if (! len || TREE_SIDE_EFFECTS (len))
12093 /* If c_strlen returned something, but not a constant,
12094 transform __strcpy_chk into __memcpy_chk. */
12095 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12099 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12100 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12101 build_call_expr_loc (loc, fn, 4,
12102 dest, src, len, size));
12108 if (! tree_int_cst_lt (maxlen, size))
12112 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12113 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12114 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12118 return build_call_expr_loc (loc, fn, 2, dest, src);
12121 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12122 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12123 length passed as third argument. */
12126 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12127 tree len, tree size, tree maxlen)
12131 if (!validate_arg (dest, POINTER_TYPE)
12132 || !validate_arg (src, POINTER_TYPE)
12133 || !validate_arg (len, INTEGER_TYPE)
12134 || !validate_arg (size, INTEGER_TYPE))
12137 if (! host_integerp (size, 1))
12140 if (! integer_all_onesp (size))
12142 if (! host_integerp (len, 1))
12144 /* If LEN is not constant, try MAXLEN too.
12145 For MAXLEN only allow optimizing into non-_ocs function
12146 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12147 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12153 if (tree_int_cst_lt (size, maxlen))
12157 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12158 fn = built_in_decls[BUILT_IN_STRNCPY];
12162 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12165 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12166 are the arguments to the call. */
12169 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12170 tree src, tree size)
12175 if (!validate_arg (dest, POINTER_TYPE)
12176 || !validate_arg (src, POINTER_TYPE)
12177 || !validate_arg (size, INTEGER_TYPE))
12180 p = c_getstr (src);
12181 /* If the SRC parameter is "", return DEST. */
12182 if (p && *p == '\0')
12183 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12185 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12188 /* If __builtin_strcat_chk is used, assume strcat is available. */
12189 fn = built_in_decls[BUILT_IN_STRCAT];
12193 return build_call_expr_loc (loc, fn, 2, dest, src);
12196 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12200 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12201 tree dest, tree src, tree len, tree size)
12206 if (!validate_arg (dest, POINTER_TYPE)
12207 || !validate_arg (src, POINTER_TYPE)
12208 || !validate_arg (size, INTEGER_TYPE)
12209 || !validate_arg (size, INTEGER_TYPE))
12212 p = c_getstr (src);
12213 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12214 if (p && *p == '\0')
12215 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12216 else if (integer_zerop (len))
12217 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12219 if (! host_integerp (size, 1))
12222 if (! integer_all_onesp (size))
12224 tree src_len = c_strlen (src, 1);
12226 && host_integerp (src_len, 1)
12227 && host_integerp (len, 1)
12228 && ! tree_int_cst_lt (len, src_len))
12230 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12231 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12235 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12240 /* If __builtin_strncat_chk is used, assume strncat is available. */
12241 fn = built_in_decls[BUILT_IN_STRNCAT];
12245 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12248 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12249 a normal call should be emitted rather than expanding the function
12250 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12253 fold_builtin_sprintf_chk (location_t loc, tree exp,
12254 enum built_in_function fcode)
12256 tree dest, size, len, fn, fmt, flag;
12257 const char *fmt_str;
12258 int nargs = call_expr_nargs (exp);
12260 /* Verify the required arguments in the original call. */
12263 dest = CALL_EXPR_ARG (exp, 0);
12264 if (!validate_arg (dest, POINTER_TYPE))
12266 flag = CALL_EXPR_ARG (exp, 1);
12267 if (!validate_arg (flag, INTEGER_TYPE))
12269 size = CALL_EXPR_ARG (exp, 2);
12270 if (!validate_arg (size, INTEGER_TYPE))
12272 fmt = CALL_EXPR_ARG (exp, 3);
12273 if (!validate_arg (fmt, POINTER_TYPE))
12276 if (! host_integerp (size, 1))
12281 if (!init_target_chars ())
12284 /* Check whether the format is a literal string constant. */
12285 fmt_str = c_getstr (fmt);
12286 if (fmt_str != NULL)
12288 /* If the format doesn't contain % args or %%, we know the size. */
12289 if (strchr (fmt_str, target_percent) == 0)
12291 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12292 len = build_int_cstu (size_type_node, strlen (fmt_str));
12294 /* If the format is "%s" and first ... argument is a string literal,
12295 we know the size too. */
12296 else if (fcode == BUILT_IN_SPRINTF_CHK
12297 && strcmp (fmt_str, target_percent_s) == 0)
12303 arg = CALL_EXPR_ARG (exp, 4);
12304 if (validate_arg (arg, POINTER_TYPE))
12306 len = c_strlen (arg, 1);
12307 if (! len || ! host_integerp (len, 1))
12314 if (! integer_all_onesp (size))
12316 if (! len || ! tree_int_cst_lt (len, size))
12320 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12321 or if format doesn't contain % chars or is "%s". */
12322 if (! integer_zerop (flag))
12324 if (fmt_str == NULL)
12326 if (strchr (fmt_str, target_percent) != NULL
12327 && strcmp (fmt_str, target_percent_s))
12331 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12332 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12333 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12337 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12340 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12341 a normal call should be emitted rather than expanding the function
12342 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12343 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12344 passed as second argument. */
12347 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12348 enum built_in_function fcode)
12350 tree dest, size, len, fn, fmt, flag;
12351 const char *fmt_str;
12353 /* Verify the required arguments in the original call. */
12354 if (call_expr_nargs (exp) < 5)
12356 dest = CALL_EXPR_ARG (exp, 0);
12357 if (!validate_arg (dest, POINTER_TYPE))
12359 len = CALL_EXPR_ARG (exp, 1);
12360 if (!validate_arg (len, INTEGER_TYPE))
12362 flag = CALL_EXPR_ARG (exp, 2);
12363 if (!validate_arg (flag, INTEGER_TYPE))
12365 size = CALL_EXPR_ARG (exp, 3);
12366 if (!validate_arg (size, INTEGER_TYPE))
12368 fmt = CALL_EXPR_ARG (exp, 4);
12369 if (!validate_arg (fmt, POINTER_TYPE))
12372 if (! host_integerp (size, 1))
12375 if (! integer_all_onesp (size))
12377 if (! host_integerp (len, 1))
12379 /* If LEN is not constant, try MAXLEN too.
12380 For MAXLEN only allow optimizing into non-_ocs function
12381 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12382 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12388 if (tree_int_cst_lt (size, maxlen))
12392 if (!init_target_chars ())
12395 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12396 or if format doesn't contain % chars or is "%s". */
12397 if (! integer_zerop (flag))
12399 fmt_str = c_getstr (fmt);
12400 if (fmt_str == NULL)
12402 if (strchr (fmt_str, target_percent) != NULL
12403 && strcmp (fmt_str, target_percent_s))
12407 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12409 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12410 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12414 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12417 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12418 FMT and ARG are the arguments to the call; we don't fold cases with
12419 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12421 Return NULL_TREE if no simplification was possible, otherwise return the
12422 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12423 code of the function to be simplified. */
12426 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12427 tree arg, bool ignore,
12428 enum built_in_function fcode)
12430 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12431 const char *fmt_str = NULL;
12433 /* If the return value is used, don't do the transformation. */
12437 /* Verify the required arguments in the original call. */
12438 if (!validate_arg (fmt, POINTER_TYPE))
12441 /* Check whether the format is a literal string constant. */
12442 fmt_str = c_getstr (fmt);
12443 if (fmt_str == NULL)
12446 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12448 /* If we're using an unlocked function, assume the other
12449 unlocked functions exist explicitly. */
12450 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12451 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12455 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12456 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12459 if (!init_target_chars ())
12462 if (strcmp (fmt_str, target_percent_s) == 0
12463 || strchr (fmt_str, target_percent) == NULL)
12467 if (strcmp (fmt_str, target_percent_s) == 0)
12469 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12472 if (!arg || !validate_arg (arg, POINTER_TYPE))
12475 str = c_getstr (arg);
12481 /* The format specifier doesn't contain any '%' characters. */
12482 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12488 /* If the string was "", printf does nothing. */
12489 if (str[0] == '\0')
12490 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12492 /* If the string has length of 1, call putchar. */
12493 if (str[1] == '\0')
12495 /* Given printf("c"), (where c is any one character,)
12496 convert "c"[0] to an int and pass that to the replacement
12498 newarg = build_int_cst (NULL_TREE, str[0]);
12500 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12504 /* If the string was "string\n", call puts("string"). */
12505 size_t len = strlen (str);
12506 if ((unsigned char)str[len - 1] == target_newline)
12508 /* Create a NUL-terminated string that's one char shorter
12509 than the original, stripping off the trailing '\n'. */
12510 char *newstr = XALLOCAVEC (char, len);
12511 memcpy (newstr, str, len - 1);
12512 newstr[len - 1] = 0;
12514 newarg = build_string_literal (len, newstr);
12516 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12519 /* We'd like to arrange to call fputs(string,stdout) here,
12520 but we need stdout and don't have a way to get it yet. */
12525 /* The other optimizations can be done only on the non-va_list variants. */
12526 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12529 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12530 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12532 if (!arg || !validate_arg (arg, POINTER_TYPE))
12535 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12538 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12539 else if (strcmp (fmt_str, target_percent_c) == 0)
12541 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12544 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12550 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12553 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12554 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12555 more than 3 arguments, and ARG may be null in the 2-argument case.
12557 Return NULL_TREE if no simplification was possible, otherwise return the
12558 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12559 code of the function to be simplified. */
12562 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12563 tree fmt, tree arg, bool ignore,
12564 enum built_in_function fcode)
12566 tree fn_fputc, fn_fputs, call = NULL_TREE;
12567 const char *fmt_str = NULL;
12569 /* If the return value is used, don't do the transformation. */
12573 /* Verify the required arguments in the original call. */
12574 if (!validate_arg (fp, POINTER_TYPE))
12576 if (!validate_arg (fmt, POINTER_TYPE))
12579 /* Check whether the format is a literal string constant. */
12580 fmt_str = c_getstr (fmt);
12581 if (fmt_str == NULL)
12584 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12586 /* If we're using an unlocked function, assume the other
12587 unlocked functions exist explicitly. */
12588 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12589 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12593 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12594 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12597 if (!init_target_chars ())
12600 /* If the format doesn't contain % args or %%, use strcpy. */
12601 if (strchr (fmt_str, target_percent) == NULL)
12603 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12607 /* If the format specifier was "", fprintf does nothing. */
12608 if (fmt_str[0] == '\0')
12610 /* If FP has side-effects, just wait until gimplification is
12612 if (TREE_SIDE_EFFECTS (fp))
12615 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12618 /* When "string" doesn't contain %, replace all cases of
12619 fprintf (fp, string) with fputs (string, fp). The fputs
12620 builtin will take care of special cases like length == 1. */
12622 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12625 /* The other optimizations can be done only on the non-va_list variants. */
12626 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12629 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12630 else if (strcmp (fmt_str, target_percent_s) == 0)
12632 if (!arg || !validate_arg (arg, POINTER_TYPE))
12635 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12638 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12639 else if (strcmp (fmt_str, target_percent_c) == 0)
12641 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12644 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12649 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12652 /* Initialize format string characters in the target charset. */
12655 init_target_chars (void)
12660 target_newline = lang_hooks.to_target_charset ('\n');
12661 target_percent = lang_hooks.to_target_charset ('%');
12662 target_c = lang_hooks.to_target_charset ('c');
12663 target_s = lang_hooks.to_target_charset ('s');
12664 if (target_newline == 0 || target_percent == 0 || target_c == 0
12668 target_percent_c[0] = target_percent;
12669 target_percent_c[1] = target_c;
12670 target_percent_c[2] = '\0';
12672 target_percent_s[0] = target_percent;
12673 target_percent_s[1] = target_s;
12674 target_percent_s[2] = '\0';
12676 target_percent_s_newline[0] = target_percent;
12677 target_percent_s_newline[1] = target_s;
12678 target_percent_s_newline[2] = target_newline;
12679 target_percent_s_newline[3] = '\0';
12686 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12687 and no overflow/underflow occurred. INEXACT is true if M was not
12688 exactly calculated. TYPE is the tree type for the result. This
12689 function assumes that you cleared the MPFR flags and then
12690 calculated M to see if anything subsequently set a flag prior to
12691 entering this function. Return NULL_TREE if any checks fail. */
12694 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12696 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12697 overflow/underflow occurred. If -frounding-math, proceed iff the
12698 result of calling FUNC was exact. */
12699 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12700 && (!flag_rounding_math || !inexact))
12702 REAL_VALUE_TYPE rr;
12704 real_from_mpfr (&rr, m, type, GMP_RNDN);
12705 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12706 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12707 but the mpft_t is not, then we underflowed in the
12709 if (real_isfinite (&rr)
12710 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12712 REAL_VALUE_TYPE rmode;
12714 real_convert (&rmode, TYPE_MODE (type), &rr);
12715 /* Proceed iff the specified mode can hold the value. */
12716 if (real_identical (&rmode, &rr))
12717 return build_real (type, rmode);
12723 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12724 number and no overflow/underflow occurred. INEXACT is true if M
12725 was not exactly calculated. TYPE is the tree type for the result.
12726 This function assumes that you cleared the MPFR flags and then
12727 calculated M to see if anything subsequently set a flag prior to
12728 entering this function. Return NULL_TREE if any checks fail, if
12729 FORCE_CONVERT is true, then bypass the checks. */
12732 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12734 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12735 overflow/underflow occurred. If -frounding-math, proceed iff the
12736 result of calling FUNC was exact. */
12738 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12739 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12740 && (!flag_rounding_math || !inexact)))
12742 REAL_VALUE_TYPE re, im;
12744 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12745 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12746 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12747 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12748 but the mpft_t is not, then we underflowed in the
12751 || (real_isfinite (&re) && real_isfinite (&im)
12752 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12753 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12755 REAL_VALUE_TYPE re_mode, im_mode;
12757 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12758 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12759 /* Proceed iff the specified mode can hold the value. */
12761 || (real_identical (&re_mode, &re)
12762 && real_identical (&im_mode, &im)))
12763 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12764 build_real (TREE_TYPE (type), im_mode));
12770 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12771 FUNC on it and return the resulting value as a tree with type TYPE.
12772 If MIN and/or MAX are not NULL, then the supplied ARG must be
12773 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12774 acceptable values, otherwise they are not. The mpfr precision is
12775 set to the precision of TYPE. We assume that function FUNC returns
12776 zero if the result could be calculated exactly within the requested
12780 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12781 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12784 tree result = NULL_TREE;
12788 /* To proceed, MPFR must exactly represent the target floating point
12789 format, which only happens when the target base equals two. */
12790 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12791 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12793 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12795 if (real_isfinite (ra)
12796 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12797 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12799 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12800 const int prec = fmt->p;
12801 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12805 mpfr_init2 (m, prec);
12806 mpfr_from_real (m, ra, GMP_RNDN);
12807 mpfr_clear_flags ();
12808 inexact = func (m, m, rnd);
12809 result = do_mpfr_ckconv (m, type, inexact);
12817 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12818 FUNC on it and return the resulting value as a tree with type TYPE.
12819 The mpfr precision is set to the precision of TYPE. We assume that
12820 function FUNC returns zero if the result could be calculated
12821 exactly within the requested precision. */
12824 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12825 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12827 tree result = NULL_TREE;
12832 /* To proceed, MPFR must exactly represent the target floating point
12833 format, which only happens when the target base equals two. */
12834 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12835 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12836 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12838 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12839 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12841 if (real_isfinite (ra1) && real_isfinite (ra2))
12843 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12844 const int prec = fmt->p;
12845 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12849 mpfr_inits2 (prec, m1, m2, NULL);
12850 mpfr_from_real (m1, ra1, GMP_RNDN);
12851 mpfr_from_real (m2, ra2, GMP_RNDN);
12852 mpfr_clear_flags ();
12853 inexact = func (m1, m1, m2, rnd);
12854 result = do_mpfr_ckconv (m1, type, inexact);
12855 mpfr_clears (m1, m2, NULL);
12862 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12863 FUNC on it and return the resulting value as a tree with type TYPE.
12864 The mpfr precision is set to the precision of TYPE. We assume that
12865 function FUNC returns zero if the result could be calculated
12866 exactly within the requested precision. */
12869 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12870 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12872 tree result = NULL_TREE;
12878 /* To proceed, MPFR must exactly represent the target floating point
12879 format, which only happens when the target base equals two. */
12880 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12881 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12882 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12883 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12885 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12886 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12887 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12889 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12891 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12892 const int prec = fmt->p;
12893 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12897 mpfr_inits2 (prec, m1, m2, m3, NULL);
12898 mpfr_from_real (m1, ra1, GMP_RNDN);
12899 mpfr_from_real (m2, ra2, GMP_RNDN);
12900 mpfr_from_real (m3, ra3, GMP_RNDN);
12901 mpfr_clear_flags ();
12902 inexact = func (m1, m1, m2, m3, rnd);
12903 result = do_mpfr_ckconv (m1, type, inexact);
12904 mpfr_clears (m1, m2, m3, NULL);
12911 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12912 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12913 If ARG_SINP and ARG_COSP are NULL then the result is returned
12914 as a complex value.
12915 The type is taken from the type of ARG and is used for setting the
12916 precision of the calculation and results. */
12919 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12921 tree const type = TREE_TYPE (arg);
12922 tree result = NULL_TREE;
12926 /* To proceed, MPFR must exactly represent the target floating point
12927 format, which only happens when the target base equals two. */
12928 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12929 && TREE_CODE (arg) == REAL_CST
12930 && !TREE_OVERFLOW (arg))
12932 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12934 if (real_isfinite (ra))
12936 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12937 const int prec = fmt->p;
12938 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12939 tree result_s, result_c;
12943 mpfr_inits2 (prec, m, ms, mc, NULL);
12944 mpfr_from_real (m, ra, GMP_RNDN);
12945 mpfr_clear_flags ();
12946 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12947 result_s = do_mpfr_ckconv (ms, type, inexact);
12948 result_c = do_mpfr_ckconv (mc, type, inexact);
12949 mpfr_clears (m, ms, mc, NULL);
12950 if (result_s && result_c)
12952 /* If we are to return in a complex value do so. */
12953 if (!arg_sinp && !arg_cosp)
12954 return build_complex (build_complex_type (type),
12955 result_c, result_s);
12957 /* Dereference the sin/cos pointer arguments. */
12958 arg_sinp = build_fold_indirect_ref (arg_sinp);
12959 arg_cosp = build_fold_indirect_ref (arg_cosp);
12960 /* Proceed if valid pointer type were passed in. */
12961 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12962 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12964 /* Set the values. */
12965 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12967 TREE_SIDE_EFFECTS (result_s) = 1;
12968 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12970 TREE_SIDE_EFFECTS (result_c) = 1;
12971 /* Combine the assignments into a compound expr. */
12972 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12973 result_s, result_c));
12981 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12982 two-argument mpfr order N Bessel function FUNC on them and return
12983 the resulting value as a tree with type TYPE. The mpfr precision
12984 is set to the precision of TYPE. We assume that function FUNC
12985 returns zero if the result could be calculated exactly within the
12986 requested precision. */
12988 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12989 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12990 const REAL_VALUE_TYPE *min, bool inclusive)
12992 tree result = NULL_TREE;
12997 /* To proceed, MPFR must exactly represent the target floating point
12998 format, which only happens when the target base equals two. */
12999 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13000 && host_integerp (arg1, 0)
13001 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13003 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13004 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13007 && real_isfinite (ra)
13008 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13010 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13011 const int prec = fmt->p;
13012 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13016 mpfr_init2 (m, prec);
13017 mpfr_from_real (m, ra, GMP_RNDN);
13018 mpfr_clear_flags ();
13019 inexact = func (m, n, m, rnd);
13020 result = do_mpfr_ckconv (m, type, inexact);
13028 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13029 the pointer *(ARG_QUO) and return the result. The type is taken
13030 from the type of ARG0 and is used for setting the precision of the
13031 calculation and results. */
13034 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13036 tree const type = TREE_TYPE (arg0);
13037 tree result = NULL_TREE;
13042 /* To proceed, MPFR must exactly represent the target floating point
13043 format, which only happens when the target base equals two. */
13044 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13045 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13046 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13048 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13049 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13051 if (real_isfinite (ra0) && real_isfinite (ra1))
13053 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13054 const int prec = fmt->p;
13055 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13060 mpfr_inits2 (prec, m0, m1, NULL);
13061 mpfr_from_real (m0, ra0, GMP_RNDN);
13062 mpfr_from_real (m1, ra1, GMP_RNDN);
13063 mpfr_clear_flags ();
13064 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13065 /* Remquo is independent of the rounding mode, so pass
13066 inexact=0 to do_mpfr_ckconv(). */
13067 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13068 mpfr_clears (m0, m1, NULL);
13071 /* MPFR calculates quo in the host's long so it may
13072 return more bits in quo than the target int can hold
13073 if sizeof(host long) > sizeof(target int). This can
13074 happen even for native compilers in LP64 mode. In
13075 these cases, modulo the quo value with the largest
13076 number that the target int can hold while leaving one
13077 bit for the sign. */
13078 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13079 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13081 /* Dereference the quo pointer argument. */
13082 arg_quo = build_fold_indirect_ref (arg_quo);
13083 /* Proceed iff a valid pointer type was passed in. */
13084 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13086 /* Set the value. */
13087 tree result_quo = fold_build2 (MODIFY_EXPR,
13088 TREE_TYPE (arg_quo), arg_quo,
13089 build_int_cst (NULL, integer_quo));
13090 TREE_SIDE_EFFECTS (result_quo) = 1;
13091 /* Combine the quo assignment with the rem. */
13092 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13093 result_quo, result_rem));
13101 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13102 resulting value as a tree with type TYPE. The mpfr precision is
13103 set to the precision of TYPE. We assume that this mpfr function
13104 returns zero if the result could be calculated exactly within the
13105 requested precision. In addition, the integer pointer represented
13106 by ARG_SG will be dereferenced and set to the appropriate signgam
13110 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13112 tree result = NULL_TREE;
13116 /* To proceed, MPFR must exactly represent the target floating point
13117 format, which only happens when the target base equals two. Also
13118 verify ARG is a constant and that ARG_SG is an int pointer. */
13119 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13120 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13121 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13122 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13124 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13126 /* In addition to NaN and Inf, the argument cannot be zero or a
13127 negative integer. */
13128 if (real_isfinite (ra)
13129 && ra->cl != rvc_zero
13130 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13132 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13133 const int prec = fmt->p;
13134 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13139 mpfr_init2 (m, prec);
13140 mpfr_from_real (m, ra, GMP_RNDN);
13141 mpfr_clear_flags ();
13142 inexact = mpfr_lgamma (m, &sg, m, rnd);
13143 result_lg = do_mpfr_ckconv (m, type, inexact);
13149 /* Dereference the arg_sg pointer argument. */
13150 arg_sg = build_fold_indirect_ref (arg_sg);
13151 /* Assign the signgam value into *arg_sg. */
13152 result_sg = fold_build2 (MODIFY_EXPR,
13153 TREE_TYPE (arg_sg), arg_sg,
13154 build_int_cst (NULL, sg));
13155 TREE_SIDE_EFFECTS (result_sg) = 1;
13156 /* Combine the signgam assignment with the lgamma result. */
13157 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13158 result_sg, result_lg));
13166 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13167 function FUNC on it and return the resulting value as a tree with
13168 type TYPE. The mpfr precision is set to the precision of TYPE. We
13169 assume that function FUNC returns zero if the result could be
13170 calculated exactly within the requested precision. */
13173 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13175 tree result = NULL_TREE;
13179 /* To proceed, MPFR must exactly represent the target floating point
13180 format, which only happens when the target base equals two. */
13181 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13182 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13183 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13185 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13186 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13188 if (real_isfinite (re) && real_isfinite (im))
13190 const struct real_format *const fmt =
13191 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13192 const int prec = fmt->p;
13193 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13194 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13198 mpc_init2 (m, prec);
13199 mpfr_from_real (mpc_realref(m), re, rnd);
13200 mpfr_from_real (mpc_imagref(m), im, rnd);
13201 mpfr_clear_flags ();
13202 inexact = func (m, m, crnd);
13203 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13211 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13212 mpc function FUNC on it and return the resulting value as a tree
13213 with type TYPE. The mpfr precision is set to the precision of
13214 TYPE. We assume that function FUNC returns zero if the result
13215 could be calculated exactly within the requested precision. If
13216 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13217 in the arguments and/or results. */
13220 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13221 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13223 tree result = NULL_TREE;
13228 /* To proceed, MPFR must exactly represent the target floating point
13229 format, which only happens when the target base equals two. */
13230 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13231 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13232 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13233 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13234 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13236 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13237 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13238 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13239 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13242 || (real_isfinite (re0) && real_isfinite (im0)
13243 && real_isfinite (re1) && real_isfinite (im1)))
13245 const struct real_format *const fmt =
13246 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13247 const int prec = fmt->p;
13248 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13249 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13253 mpc_init2 (m0, prec);
13254 mpc_init2 (m1, prec);
13255 mpfr_from_real (mpc_realref(m0), re0, rnd);
13256 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13257 mpfr_from_real (mpc_realref(m1), re1, rnd);
13258 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13259 mpfr_clear_flags ();
13260 inexact = func (m0, m0, m1, crnd);
13261 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13271 The functions below provide an alternate interface for folding
13272 builtin function calls presented as GIMPLE_CALL statements rather
13273 than as CALL_EXPRs. The folded result is still expressed as a
13274 tree. There is too much code duplication in the handling of
13275 varargs functions, and a more intrusive re-factoring would permit
13276 better sharing of code between the tree and statement-based
13277 versions of these functions. */
13279 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13280 along with N new arguments specified as the "..." parameters. SKIP
13281 is the number of arguments in STMT to be omitted. This function is used
13282 to do varargs-to-varargs transformations. */
13285 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13287 int oldnargs = gimple_call_num_args (stmt);
13288 int nargs = oldnargs - skip + n;
13289 tree fntype = TREE_TYPE (fndecl);
13290 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13294 location_t loc = gimple_location (stmt);
13296 buffer = XALLOCAVEC (tree, nargs);
13298 for (i = 0; i < n; i++)
13299 buffer[i] = va_arg (ap, tree);
13301 for (j = skip; j < oldnargs; j++, i++)
13302 buffer[i] = gimple_call_arg (stmt, j);
13304 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13307 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13308 a normal call should be emitted rather than expanding the function
13309 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13312 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13314 tree dest, size, len, fn, fmt, flag;
13315 const char *fmt_str;
13316 int nargs = gimple_call_num_args (stmt);
13318 /* Verify the required arguments in the original call. */
13321 dest = gimple_call_arg (stmt, 0);
13322 if (!validate_arg (dest, POINTER_TYPE))
13324 flag = gimple_call_arg (stmt, 1);
13325 if (!validate_arg (flag, INTEGER_TYPE))
13327 size = gimple_call_arg (stmt, 2);
13328 if (!validate_arg (size, INTEGER_TYPE))
13330 fmt = gimple_call_arg (stmt, 3);
13331 if (!validate_arg (fmt, POINTER_TYPE))
13334 if (! host_integerp (size, 1))
13339 if (!init_target_chars ())
13342 /* Check whether the format is a literal string constant. */
13343 fmt_str = c_getstr (fmt);
13344 if (fmt_str != NULL)
13346 /* If the format doesn't contain % args or %%, we know the size. */
13347 if (strchr (fmt_str, target_percent) == 0)
13349 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13350 len = build_int_cstu (size_type_node, strlen (fmt_str));
13352 /* If the format is "%s" and first ... argument is a string literal,
13353 we know the size too. */
13354 else if (fcode == BUILT_IN_SPRINTF_CHK
13355 && strcmp (fmt_str, target_percent_s) == 0)
13361 arg = gimple_call_arg (stmt, 4);
13362 if (validate_arg (arg, POINTER_TYPE))
13364 len = c_strlen (arg, 1);
13365 if (! len || ! host_integerp (len, 1))
13372 if (! integer_all_onesp (size))
13374 if (! len || ! tree_int_cst_lt (len, size))
13378 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13379 or if format doesn't contain % chars or is "%s". */
13380 if (! integer_zerop (flag))
13382 if (fmt_str == NULL)
13384 if (strchr (fmt_str, target_percent) != NULL
13385 && strcmp (fmt_str, target_percent_s))
13389 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13390 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13391 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13395 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13398 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13399 a normal call should be emitted rather than expanding the function
13400 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13401 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13402 passed as second argument. */
13405 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13406 enum built_in_function fcode)
13408 tree dest, size, len, fn, fmt, flag;
13409 const char *fmt_str;
13411 /* Verify the required arguments in the original call. */
13412 if (gimple_call_num_args (stmt) < 5)
13414 dest = gimple_call_arg (stmt, 0);
13415 if (!validate_arg (dest, POINTER_TYPE))
13417 len = gimple_call_arg (stmt, 1);
13418 if (!validate_arg (len, INTEGER_TYPE))
13420 flag = gimple_call_arg (stmt, 2);
13421 if (!validate_arg (flag, INTEGER_TYPE))
13423 size = gimple_call_arg (stmt, 3);
13424 if (!validate_arg (size, INTEGER_TYPE))
13426 fmt = gimple_call_arg (stmt, 4);
13427 if (!validate_arg (fmt, POINTER_TYPE))
13430 if (! host_integerp (size, 1))
13433 if (! integer_all_onesp (size))
13435 if (! host_integerp (len, 1))
13437 /* If LEN is not constant, try MAXLEN too.
13438 For MAXLEN only allow optimizing into non-_ocs function
13439 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13440 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13446 if (tree_int_cst_lt (size, maxlen))
13450 if (!init_target_chars ())
13453 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13454 or if format doesn't contain % chars or is "%s". */
13455 if (! integer_zerop (flag))
13457 fmt_str = c_getstr (fmt);
13458 if (fmt_str == NULL)
13460 if (strchr (fmt_str, target_percent) != NULL
13461 && strcmp (fmt_str, target_percent_s))
13465 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13467 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13468 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13472 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13475 /* Builtins with folding operations that operate on "..." arguments
13476 need special handling; we need to store the arguments in a convenient
13477 data structure before attempting any folding. Fortunately there are
13478 only a few builtins that fall into this category. FNDECL is the
13479 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13480 result of the function call is ignored. */
13483 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13484 bool ignore ATTRIBUTE_UNUSED)
13486 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13487 tree ret = NULL_TREE;
13491 case BUILT_IN_SPRINTF_CHK:
13492 case BUILT_IN_VSPRINTF_CHK:
13493 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13496 case BUILT_IN_SNPRINTF_CHK:
13497 case BUILT_IN_VSNPRINTF_CHK:
13498 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13505 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13506 TREE_NO_WARNING (ret) = 1;
13512 /* A wrapper function for builtin folding that prevents warnings for
13513 "statement without effect" and the like, caused by removing the
13514 call node earlier than the warning is generated. */
13517 fold_call_stmt (gimple stmt, bool ignore)
13519 tree ret = NULL_TREE;
13520 tree fndecl = gimple_call_fndecl (stmt);
13521 location_t loc = gimple_location (stmt);
13523 && TREE_CODE (fndecl) == FUNCTION_DECL
13524 && DECL_BUILT_IN (fndecl)
13525 && !gimple_call_va_arg_pack_p (stmt))
13527 int nargs = gimple_call_num_args (stmt);
13529 if (avoid_folding_inline_builtin (fndecl))
13531 /* FIXME: Don't use a list in this interface. */
13532 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13534 tree arglist = NULL_TREE;
13536 for (i = nargs - 1; i >= 0; i--)
13537 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13538 return targetm.fold_builtin (fndecl, arglist, ignore);
13542 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13544 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13546 for (i = 0; i < nargs; i++)
13547 args[i] = gimple_call_arg (stmt, i);
13548 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13551 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13554 /* Propagate location information from original call to
13555 expansion of builtin. Otherwise things like
13556 maybe_emit_chk_warning, that operate on the expansion
13557 of a builtin, will use the wrong location information. */
13558 if (gimple_has_location (stmt))
13560 tree realret = ret;
13561 if (TREE_CODE (ret) == NOP_EXPR)
13562 realret = TREE_OPERAND (ret, 0);
13563 if (CAN_HAVE_LOCATION_P (realret)
13564 && !EXPR_HAS_LOCATION (realret))
13565 SET_EXPR_LOCATION (realret, loc);
13575 /* Look up the function in built_in_decls that corresponds to DECL
13576 and set ASMSPEC as its user assembler name. DECL must be a
13577 function decl that declares a builtin. */
13580 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13583 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13584 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13587 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13588 set_user_assembler_name (builtin, asmspec);
13589 switch (DECL_FUNCTION_CODE (decl))
13591 case BUILT_IN_MEMCPY:
13592 init_block_move_fn (asmspec);
13593 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13595 case BUILT_IN_MEMSET:
13596 init_block_clear_fn (asmspec);
13597 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13599 case BUILT_IN_MEMMOVE:
13600 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13602 case BUILT_IN_MEMCMP:
13603 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13605 case BUILT_IN_ABORT:
13606 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13609 if (INT_TYPE_SIZE < BITS_PER_WORD)
13611 set_user_assembler_libfunc ("ffs", asmspec);
13612 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13613 MODE_INT, 0), "ffs");