1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
68 const char * built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_args_info (tree);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
228 /* Return true if NAME starts with __builtin_ or __sync_. */
231 is_builtin_name (const char *name)
233 if (strncmp (name, "__builtin_", 10) == 0)
235 if (strncmp (name, "__sync_", 7) == 0)
241 /* Return true if DECL is a function symbol representing a built-in. */
244 is_builtin_fn (tree decl)
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
255 called_as_built_in (tree node)
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
260 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
261 return is_builtin_name (name);
264 /* Return the alignment in bits of EXP, an object.
265 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
266 guessed alignment e.g. from type alignment. */
269 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
274 if (handled_component_p (exp))
276 HOST_WIDE_INT bitsize, bitpos;
278 enum machine_mode mode;
279 int unsignedp, volatilep;
281 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
282 &mode, &unsignedp, &volatilep, true);
284 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
289 if (TREE_CODE (offset) == PLUS_EXPR)
291 next_offset = TREE_OPERAND (offset, 0);
292 offset = TREE_OPERAND (offset, 1);
296 if (host_integerp (offset, 1))
298 /* Any overflow in calculating offset_bits won't change
301 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
304 inner = MIN (inner, (offset_bits & -offset_bits));
306 else if (TREE_CODE (offset) == MULT_EXPR
307 && host_integerp (TREE_OPERAND (offset, 1), 1))
309 /* Any overflow in calculating offset_factor won't change
311 unsigned offset_factor
312 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
316 inner = MIN (inner, (offset_factor & -offset_factor));
320 inner = MIN (inner, BITS_PER_UNIT);
323 offset = next_offset;
326 if (TREE_CODE (exp) == CONST_DECL)
327 exp = DECL_INITIAL (exp);
329 && TREE_CODE (exp) != LABEL_DECL)
330 align = MIN (inner, DECL_ALIGN (exp));
331 #ifdef CONSTANT_ALIGNMENT
332 else if (CONSTANT_CLASS_P (exp))
333 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
335 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
336 || TREE_CODE (exp) == INDIRECT_REF)
337 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
339 align = MIN (align, inner);
340 return MIN (align, max_align);
343 /* Returns true iff we can trust that alignment information has been
344 calculated properly. */
347 can_trust_pointer_alignment (void)
349 /* We rely on TER to compute accurate alignment information. */
350 return (optimize && flag_tree_ter);
353 /* Return the alignment in bits of EXP, a pointer valued expression.
354 But don't return more than MAX_ALIGN no matter what.
355 The alignment returned is, by default, the alignment of the thing that
356 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
358 Otherwise, look at the expression to see if we can do better, i.e., if the
359 expression is actually pointing at an object whose alignment is tighter. */
362 get_pointer_alignment (tree exp, unsigned int max_align)
364 unsigned int align, inner;
366 if (!can_trust_pointer_alignment ())
369 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
372 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
373 align = MIN (align, max_align);
377 switch (TREE_CODE (exp))
380 exp = TREE_OPERAND (exp, 0);
381 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
384 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
385 align = MIN (inner, max_align);
388 case POINTER_PLUS_EXPR:
389 /* If sum of pointer + int, restrict our maximum alignment to that
390 imposed by the integer. If not, we can't do any better than
392 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
395 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
396 & (max_align / BITS_PER_UNIT - 1))
400 exp = TREE_OPERAND (exp, 0);
404 /* See what we are pointing at and look at its alignment. */
405 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
413 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
414 way, because it could contain a zero byte in the middle.
415 TREE_STRING_LENGTH is the size of the character array, not the string.
417 ONLY_VALUE should be nonzero if the result is not going to be emitted
418 into the instruction stream and zero if it is going to be expanded.
419 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
420 is returned, otherwise NULL, since
421 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
422 evaluate the side-effects.
424 The value returned is of type `ssizetype'.
426 Unfortunately, string_constant can't access the values of const char
427 arrays with initializers, so neither can we do so here. */
430 c_strlen (tree src, int only_value)
433 HOST_WIDE_INT offset;
439 if (TREE_CODE (src) == COND_EXPR
440 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
444 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
445 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
446 if (tree_int_cst_equal (len1, len2))
450 if (TREE_CODE (src) == COMPOUND_EXPR
451 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
452 return c_strlen (TREE_OPERAND (src, 1), only_value);
454 if (EXPR_HAS_LOCATION (src))
455 loc = EXPR_LOCATION (src);
457 loc = input_location;
459 src = string_constant (src, &offset_node);
463 max = TREE_STRING_LENGTH (src) - 1;
464 ptr = TREE_STRING_POINTER (src);
466 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
468 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
469 compute the offset to the following null if we don't know where to
470 start searching for it. */
473 for (i = 0; i < max; i++)
477 /* We don't know the starting offset, but we do know that the string
478 has no internal zero bytes. We can assume that the offset falls
479 within the bounds of the string; otherwise, the programmer deserves
480 what he gets. Subtract the offset from the length of the string,
481 and return that. This would perhaps not be valid if we were dealing
482 with named arrays in addition to literal string constants. */
484 return size_diffop_loc (loc, size_int (max), offset_node);
487 /* We have a known offset into the string. Start searching there for
488 a null character if we can represent it as a single HOST_WIDE_INT. */
489 if (offset_node == 0)
491 else if (! host_integerp (offset_node, 0))
494 offset = tree_low_cst (offset_node, 0);
496 /* If the offset is known to be out of bounds, warn, and call strlen at
498 if (offset < 0 || offset > max)
500 /* Suppress multiple warnings for propagated constant strings. */
501 if (! TREE_NO_WARNING (src))
503 warning_at (loc, 0, "offset outside bounds of constant string");
504 TREE_NO_WARNING (src) = 1;
509 /* Use strlen to search for the first zero byte. Since any strings
510 constructed with build_string will have nulls appended, we win even
511 if we get handed something like (char[4])"abcd".
513 Since OFFSET is our starting index into the string, no further
514 calculation is needed. */
515 return ssize_int (strlen (ptr + offset));
518 /* Return a char pointer for a C string if it is a string constant
519 or sum of string constant and integer constant. */
526 src = string_constant (src, &offset_node);
530 if (offset_node == 0)
531 return TREE_STRING_POINTER (src);
532 else if (!host_integerp (offset_node, 1)
533 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
536 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
539 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
540 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
543 c_readstr (const char *str, enum machine_mode mode)
549 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
554 for (i = 0; i < GET_MODE_SIZE (mode); i++)
557 if (WORDS_BIG_ENDIAN)
558 j = GET_MODE_SIZE (mode) - i - 1;
559 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
560 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
561 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
563 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
566 ch = (unsigned char) str[i];
567 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
569 return immed_double_const (c[0], c[1], mode);
572 /* Cast a target constant CST to target CHAR and if that value fits into
573 host char type, return zero and put that value into variable pointed to by
577 target_char_cast (tree cst, char *p)
579 unsigned HOST_WIDE_INT val, hostval;
581 if (!host_integerp (cst, 1)
582 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
585 val = tree_low_cst (cst, 1);
586 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
587 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
590 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
591 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
600 /* Similar to save_expr, but assumes that arbitrary code is not executed
601 in between the multiple evaluations. In particular, we assume that a
602 non-addressable local variable will not be modified. */
605 builtin_save_expr (tree exp)
607 if (TREE_ADDRESSABLE (exp) == 0
608 && (TREE_CODE (exp) == PARM_DECL
609 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
612 return save_expr (exp);
615 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
616 times to get the address of either a higher stack frame, or a return
617 address located within it (depending on FNDECL_CODE). */
620 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
624 #ifdef INITIAL_FRAME_ADDRESS_RTX
625 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
629 /* For a zero count with __builtin_return_address, we don't care what
630 frame address we return, because target-specific definitions will
631 override us. Therefore frame pointer elimination is OK, and using
632 the soft frame pointer is OK.
634 For a nonzero count, or a zero count with __builtin_frame_address,
635 we require a stable offset from the current frame pointer to the
636 previous one, so we must use the hard frame pointer, and
637 we must disable frame pointer elimination. */
638 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
639 tem = frame_pointer_rtx;
642 tem = hard_frame_pointer_rtx;
644 /* Tell reload not to eliminate the frame pointer. */
645 crtl->accesses_prior_frames = 1;
649 /* Some machines need special handling before we can access
650 arbitrary frames. For example, on the SPARC, we must first flush
651 all register windows to the stack. */
652 #ifdef SETUP_FRAME_ADDRESSES
654 SETUP_FRAME_ADDRESSES ();
657 /* On the SPARC, the return address is not in the frame, it is in a
658 register. There is no way to access it off of the current frame
659 pointer, but it can be accessed off the previous frame pointer by
660 reading the value from the register window save area. */
661 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
662 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
666 /* Scan back COUNT frames to the specified frame. */
667 for (i = 0; i < count; i++)
669 /* Assume the dynamic chain pointer is in the word that the
670 frame address points to, unless otherwise specified. */
671 #ifdef DYNAMIC_CHAIN_ADDRESS
672 tem = DYNAMIC_CHAIN_ADDRESS (tem);
674 tem = memory_address (Pmode, tem);
675 tem = gen_frame_mem (Pmode, tem);
676 tem = copy_to_reg (tem);
679 /* For __builtin_frame_address, return what we've got. But, on
680 the SPARC for example, we may have to add a bias. */
681 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
682 #ifdef FRAME_ADDR_RTX
683 return FRAME_ADDR_RTX (tem);
688 /* For __builtin_return_address, get the return address from that frame. */
689 #ifdef RETURN_ADDR_RTX
690 tem = RETURN_ADDR_RTX (count, tem);
692 tem = memory_address (Pmode,
693 plus_constant (tem, GET_MODE_SIZE (Pmode)));
694 tem = gen_frame_mem (Pmode, tem);
699 /* Alias set used for setjmp buffer. */
700 static alias_set_type setjmp_alias_set = -1;
702 /* Construct the leading half of a __builtin_setjmp call. Control will
703 return to RECEIVER_LABEL. This is also called directly by the SJLJ
704 exception handling code. */
707 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
709 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
713 if (setjmp_alias_set == -1)
714 setjmp_alias_set = new_alias_set ();
716 buf_addr = convert_memory_address (Pmode, buf_addr);
718 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
720 /* We store the frame pointer and the address of receiver_label in
721 the buffer and use the rest of it for the stack save area, which
722 is machine-dependent. */
724 mem = gen_rtx_MEM (Pmode, buf_addr);
725 set_mem_alias_set (mem, setjmp_alias_set);
726 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
728 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
729 set_mem_alias_set (mem, setjmp_alias_set);
731 emit_move_insn (validize_mem (mem),
732 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
734 stack_save = gen_rtx_MEM (sa_mode,
735 plus_constant (buf_addr,
736 2 * GET_MODE_SIZE (Pmode)));
737 set_mem_alias_set (stack_save, setjmp_alias_set);
738 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
740 /* If there is further processing to do, do it. */
741 #ifdef HAVE_builtin_setjmp_setup
742 if (HAVE_builtin_setjmp_setup)
743 emit_insn (gen_builtin_setjmp_setup (buf_addr));
746 /* Tell optimize_save_area_alloca that extra work is going to
747 need to go on during alloca. */
748 cfun->calls_setjmp = 1;
750 /* We have a nonlocal label. */
751 cfun->has_nonlocal_label = 1;
754 /* Construct the trailing part of a __builtin_setjmp call. This is
755 also called directly by the SJLJ exception handling code. */
758 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
762 /* Clobber the FP when we get here, so we have to make sure it's
763 marked as used by this function. */
764 emit_use (hard_frame_pointer_rtx);
766 /* Mark the static chain as clobbered here so life information
767 doesn't get messed up for it. */
768 chain = targetm.calls.static_chain (current_function_decl, true);
769 if (chain && REG_P (chain))
770 emit_clobber (chain);
772 /* Now put in the code to restore the frame pointer, and argument
773 pointer, if needed. */
774 #ifdef HAVE_nonlocal_goto
775 if (! HAVE_nonlocal_goto)
778 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
779 /* This might change the hard frame pointer in ways that aren't
780 apparent to early optimization passes, so force a clobber. */
781 emit_clobber (hard_frame_pointer_rtx);
784 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
785 if (fixed_regs[ARG_POINTER_REGNUM])
787 #ifdef ELIMINABLE_REGS
789 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
791 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
792 if (elim_regs[i].from == ARG_POINTER_REGNUM
793 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
796 if (i == ARRAY_SIZE (elim_regs))
799 /* Now restore our arg pointer from the address at which it
800 was saved in our stack frame. */
801 emit_move_insn (crtl->args.internal_arg_pointer,
802 copy_to_reg (get_arg_pointer_save_area ()));
807 #ifdef HAVE_builtin_setjmp_receiver
808 if (HAVE_builtin_setjmp_receiver)
809 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
812 #ifdef HAVE_nonlocal_goto_receiver
813 if (HAVE_nonlocal_goto_receiver)
814 emit_insn (gen_nonlocal_goto_receiver ());
819 /* We must not allow the code we just generated to be reordered by
820 scheduling. Specifically, the update of the frame pointer must
821 happen immediately, not later. */
822 emit_insn (gen_blockage ());
825 /* __builtin_longjmp is passed a pointer to an array of five words (not
826 all will be used on all machines). It operates similarly to the C
827 library function of the same name, but is more efficient. Much of
828 the code below is copied from the handling of non-local gotos. */
831 expand_builtin_longjmp (rtx buf_addr, rtx value)
833 rtx fp, lab, stack, insn, last;
834 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
836 /* DRAP is needed for stack realign if longjmp is expanded to current
838 if (SUPPORTS_STACK_ALIGNMENT)
839 crtl->need_drap = true;
841 if (setjmp_alias_set == -1)
842 setjmp_alias_set = new_alias_set ();
844 buf_addr = convert_memory_address (Pmode, buf_addr);
846 buf_addr = force_reg (Pmode, buf_addr);
848 /* We require that the user must pass a second argument of 1, because
849 that is what builtin_setjmp will return. */
850 gcc_assert (value == const1_rtx);
852 last = get_last_insn ();
853 #ifdef HAVE_builtin_longjmp
854 if (HAVE_builtin_longjmp)
855 emit_insn (gen_builtin_longjmp (buf_addr));
859 fp = gen_rtx_MEM (Pmode, buf_addr);
860 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
861 GET_MODE_SIZE (Pmode)));
863 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
864 2 * GET_MODE_SIZE (Pmode)));
865 set_mem_alias_set (fp, setjmp_alias_set);
866 set_mem_alias_set (lab, setjmp_alias_set);
867 set_mem_alias_set (stack, setjmp_alias_set);
869 /* Pick up FP, label, and SP from the block and jump. This code is
870 from expand_goto in stmt.c; see there for detailed comments. */
871 #ifdef HAVE_nonlocal_goto
872 if (HAVE_nonlocal_goto)
873 /* We have to pass a value to the nonlocal_goto pattern that will
874 get copied into the static_chain pointer, but it does not matter
875 what that value is, because builtin_setjmp does not use it. */
876 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
880 lab = copy_to_reg (lab);
882 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
883 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
885 emit_move_insn (hard_frame_pointer_rtx, fp);
886 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
888 emit_use (hard_frame_pointer_rtx);
889 emit_use (stack_pointer_rtx);
890 emit_indirect_jump (lab);
894 /* Search backwards and mark the jump insn as a non-local goto.
895 Note that this precludes the use of __builtin_longjmp to a
896 __builtin_setjmp target in the same function. However, we've
897 already cautioned the user that these functions are for
898 internal exception handling use only. */
899 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
901 gcc_assert (insn != last);
905 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
908 else if (CALL_P (insn))
913 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
914 and the address of the save area. */
917 expand_builtin_nonlocal_goto (tree exp)
919 tree t_label, t_save_area;
920 rtx r_label, r_save_area, r_fp, r_sp, insn;
922 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
925 t_label = CALL_EXPR_ARG (exp, 0);
926 t_save_area = CALL_EXPR_ARG (exp, 1);
928 r_label = expand_normal (t_label);
929 r_label = convert_memory_address (Pmode, r_label);
930 r_save_area = expand_normal (t_save_area);
931 r_save_area = convert_memory_address (Pmode, r_save_area);
932 /* Copy the address of the save location to a register just in case it was based
933 on the frame pointer. */
934 r_save_area = copy_to_reg (r_save_area);
935 r_fp = gen_rtx_MEM (Pmode, r_save_area);
936 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
937 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
939 crtl->has_nonlocal_goto = 1;
941 #ifdef HAVE_nonlocal_goto
942 /* ??? We no longer need to pass the static chain value, afaik. */
943 if (HAVE_nonlocal_goto)
944 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
948 r_label = copy_to_reg (r_label);
950 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
951 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
953 /* Restore frame pointer for containing function.
954 This sets the actual hard register used for the frame pointer
955 to the location of the function's incoming static chain info.
956 The non-local goto handler will then adjust it to contain the
957 proper value and reload the argument pointer, if needed. */
958 emit_move_insn (hard_frame_pointer_rtx, r_fp);
959 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
961 /* USE of hard_frame_pointer_rtx added for consistency;
962 not clear if really needed. */
963 emit_use (hard_frame_pointer_rtx);
964 emit_use (stack_pointer_rtx);
966 /* If the architecture is using a GP register, we must
967 conservatively assume that the target function makes use of it.
968 The prologue of functions with nonlocal gotos must therefore
969 initialize the GP register to the appropriate value, and we
970 must then make sure that this value is live at the point
971 of the jump. (Note that this doesn't necessarily apply
972 to targets with a nonlocal_goto pattern; they are free
973 to implement it in their own way. Note also that this is
974 a no-op if the GP register is a global invariant.) */
975 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
976 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
977 emit_use (pic_offset_table_rtx);
979 emit_indirect_jump (r_label);
982 /* Search backwards to the jump insn and mark it as a
984 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
988 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
991 else if (CALL_P (insn))
998 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
999 (not all will be used on all machines) that was passed to __builtin_setjmp.
1000 It updates the stack pointer in that block to correspond to the current
1004 expand_builtin_update_setjmp_buf (rtx buf_addr)
1006 enum machine_mode sa_mode = Pmode;
1010 #ifdef HAVE_save_stack_nonlocal
1011 if (HAVE_save_stack_nonlocal)
1012 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1014 #ifdef STACK_SAVEAREA_MODE
1015 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1019 = gen_rtx_MEM (sa_mode,
1022 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1026 emit_insn (gen_setjmp ());
1029 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1032 /* Expand a call to __builtin_prefetch. For a target that does not support
1033 data prefetch, evaluate the memory address argument in case it has side
1037 expand_builtin_prefetch (tree exp)
1039 tree arg0, arg1, arg2;
1043 if (!validate_arglist (exp, POINTER_TYPE, 0))
1046 arg0 = CALL_EXPR_ARG (exp, 0);
1048 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1049 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1051 nargs = call_expr_nargs (exp);
1053 arg1 = CALL_EXPR_ARG (exp, 1);
1055 arg1 = integer_zero_node;
1057 arg2 = CALL_EXPR_ARG (exp, 2);
1059 arg2 = build_int_cst (NULL_TREE, 3);
1061 /* Argument 0 is an address. */
1062 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1064 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1065 if (TREE_CODE (arg1) != INTEGER_CST)
1067 error ("second argument to %<__builtin_prefetch%> must be a constant");
1068 arg1 = integer_zero_node;
1070 op1 = expand_normal (arg1);
1071 /* Argument 1 must be either zero or one. */
1072 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1074 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1079 /* Argument 2 (locality) must be a compile-time constant int. */
1080 if (TREE_CODE (arg2) != INTEGER_CST)
1082 error ("third argument to %<__builtin_prefetch%> must be a constant");
1083 arg2 = integer_zero_node;
1085 op2 = expand_normal (arg2);
1086 /* Argument 2 must be 0, 1, 2, or 3. */
1087 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1089 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1093 #ifdef HAVE_prefetch
1096 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1098 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1099 || (GET_MODE (op0) != Pmode))
1101 op0 = convert_memory_address (Pmode, op0);
1102 op0 = force_reg (Pmode, op0);
1104 emit_insn (gen_prefetch (op0, op1, op2));
1108 /* Don't do anything with direct references to volatile memory, but
1109 generate code to handle other side effects. */
1110 if (!MEM_P (op0) && side_effects_p (op0))
1114 /* Get a MEM rtx for expression EXP which is the address of an operand
1115 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1116 the maximum length of the block of memory that might be accessed or
1120 get_memory_rtx (tree exp, tree len)
1122 tree orig_exp = exp;
1126 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1127 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1128 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1129 exp = TREE_OPERAND (exp, 0);
1131 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1132 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1134 /* Get an expression we can use to find the attributes to assign to MEM.
1135 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1136 we can. First remove any nops. */
1137 while (CONVERT_EXPR_P (exp)
1138 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1139 exp = TREE_OPERAND (exp, 0);
1142 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1143 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1144 && host_integerp (TREE_OPERAND (exp, 1), 0)
1145 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1146 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1147 else if (TREE_CODE (exp) == ADDR_EXPR)
1148 exp = TREE_OPERAND (exp, 0);
1149 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1150 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1154 /* Honor attributes derived from exp, except for the alias set
1155 (as builtin stringops may alias with anything) and the size
1156 (as stringops may access multiple array elements). */
1159 set_mem_attributes (mem, exp, 0);
1162 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1164 /* Allow the string and memory builtins to overflow from one
1165 field into another, see http://gcc.gnu.org/PR23561.
1166 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1167 memory accessed by the string or memory builtin will fit
1168 within the field. */
1169 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1171 tree mem_expr = MEM_EXPR (mem);
1172 HOST_WIDE_INT offset = -1, length = -1;
1175 while (TREE_CODE (inner) == ARRAY_REF
1176 || CONVERT_EXPR_P (inner)
1177 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1178 || TREE_CODE (inner) == SAVE_EXPR)
1179 inner = TREE_OPERAND (inner, 0);
1181 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1183 if (MEM_OFFSET (mem)
1184 && CONST_INT_P (MEM_OFFSET (mem)))
1185 offset = INTVAL (MEM_OFFSET (mem));
1187 if (offset >= 0 && len && host_integerp (len, 0))
1188 length = tree_low_cst (len, 0);
1190 while (TREE_CODE (inner) == COMPONENT_REF)
1192 tree field = TREE_OPERAND (inner, 1);
1193 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1194 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1196 /* Bitfields are generally not byte-addressable. */
1197 gcc_assert (!DECL_BIT_FIELD (field)
1198 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1199 % BITS_PER_UNIT) == 0
1200 && host_integerp (DECL_SIZE (field), 0)
1201 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1202 % BITS_PER_UNIT) == 0));
1204 /* If we can prove that the memory starting at XEXP (mem, 0) and
1205 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1206 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1207 fields without DECL_SIZE_UNIT like flexible array members. */
1209 && DECL_SIZE_UNIT (field)
1210 && host_integerp (DECL_SIZE_UNIT (field), 0))
1213 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1216 && offset + length <= size)
1221 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1222 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1223 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1231 mem_expr = TREE_OPERAND (mem_expr, 0);
1232 inner = TREE_OPERAND (inner, 0);
1235 if (mem_expr == NULL)
1237 if (mem_expr != MEM_EXPR (mem))
1239 set_mem_expr (mem, mem_expr);
1240 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1243 set_mem_alias_set (mem, 0);
1244 set_mem_size (mem, NULL_RTX);
1250 /* Built-in functions to perform an untyped call and return. */
1252 /* For each register that may be used for calling a function, this
1253 gives a mode used to copy the register's value. VOIDmode indicates
1254 the register is not used for calling a function. If the machine
1255 has register windows, this gives only the outbound registers.
1256 INCOMING_REGNO gives the corresponding inbound register. */
1257 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1259 /* For each register that may be used for returning values, this gives
1260 a mode used to copy the register's value. VOIDmode indicates the
1261 register is not used for returning values. If the machine has
1262 register windows, this gives only the outbound registers.
1263 INCOMING_REGNO gives the corresponding inbound register. */
1264 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1266 /* Return the size required for the block returned by __builtin_apply_args,
1267 and initialize apply_args_mode. */
1270 apply_args_size (void)
1272 static int size = -1;
1275 enum machine_mode mode;
1277 /* The values computed by this function never change. */
1280 /* The first value is the incoming arg-pointer. */
1281 size = GET_MODE_SIZE (Pmode);
1283 /* The second value is the structure value address unless this is
1284 passed as an "invisible" first argument. */
1285 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1286 size += GET_MODE_SIZE (Pmode);
1288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1289 if (FUNCTION_ARG_REGNO_P (regno))
1291 mode = reg_raw_mode[regno];
1293 gcc_assert (mode != VOIDmode);
1295 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1296 if (size % align != 0)
1297 size = CEIL (size, align) * align;
1298 size += GET_MODE_SIZE (mode);
1299 apply_args_mode[regno] = mode;
1303 apply_args_mode[regno] = VOIDmode;
1309 /* Return the size required for the block returned by __builtin_apply,
1310 and initialize apply_result_mode. */
1313 apply_result_size (void)
1315 static int size = -1;
1317 enum machine_mode mode;
1319 /* The values computed by this function never change. */
1324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325 if (targetm.calls.function_value_regno_p (regno))
1327 mode = reg_raw_mode[regno];
1329 gcc_assert (mode != VOIDmode);
1331 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1332 if (size % align != 0)
1333 size = CEIL (size, align) * align;
1334 size += GET_MODE_SIZE (mode);
1335 apply_result_mode[regno] = mode;
1338 apply_result_mode[regno] = VOIDmode;
1340 /* Allow targets that use untyped_call and untyped_return to override
1341 the size so that machine-specific information can be stored here. */
1342 #ifdef APPLY_RESULT_SIZE
1343 size = APPLY_RESULT_SIZE;
1349 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1350 /* Create a vector describing the result block RESULT. If SAVEP is true,
1351 the result block is used to save the values; otherwise it is used to
1352 restore the values. */
1355 result_vector (int savep, rtx result)
1357 int regno, size, align, nelts;
1358 enum machine_mode mode;
1360 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1363 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1364 if ((mode = apply_result_mode[regno]) != VOIDmode)
1366 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1367 if (size % align != 0)
1368 size = CEIL (size, align) * align;
1369 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1370 mem = adjust_address (result, mode, size);
1371 savevec[nelts++] = (savep
1372 ? gen_rtx_SET (VOIDmode, mem, reg)
1373 : gen_rtx_SET (VOIDmode, reg, mem));
1374 size += GET_MODE_SIZE (mode);
1376 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1378 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1380 /* Save the state required to perform an untyped call with the same
1381 arguments as were passed to the current function. */
1384 expand_builtin_apply_args_1 (void)
1387 int size, align, regno;
1388 enum machine_mode mode;
1389 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1391 /* Create a block where the arg-pointer, structure value address,
1392 and argument registers can be saved. */
1393 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1395 /* Walk past the arg-pointer and structure value address. */
1396 size = GET_MODE_SIZE (Pmode);
1397 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1398 size += GET_MODE_SIZE (Pmode);
1400 /* Save each register used in calling a function to the block. */
1401 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1402 if ((mode = apply_args_mode[regno]) != VOIDmode)
1404 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1405 if (size % align != 0)
1406 size = CEIL (size, align) * align;
1408 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1410 emit_move_insn (adjust_address (registers, mode, size), tem);
1411 size += GET_MODE_SIZE (mode);
1414 /* Save the arg pointer to the block. */
1415 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1416 #ifdef STACK_GROWS_DOWNWARD
1417 /* We need the pointer as the caller actually passed them to us, not
1418 as we might have pretended they were passed. Make sure it's a valid
1419 operand, as emit_move_insn isn't expected to handle a PLUS. */
1421 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1424 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1426 size = GET_MODE_SIZE (Pmode);
1428 /* Save the structure value address unless this is passed as an
1429 "invisible" first argument. */
1430 if (struct_incoming_value)
1432 emit_move_insn (adjust_address (registers, Pmode, size),
1433 copy_to_reg (struct_incoming_value));
1434 size += GET_MODE_SIZE (Pmode);
1437 /* Return the address of the block. */
1438 return copy_addr_to_reg (XEXP (registers, 0));
1441 /* __builtin_apply_args returns block of memory allocated on
1442 the stack into which is stored the arg pointer, structure
1443 value address, static chain, and all the registers that might
1444 possibly be used in performing a function call. The code is
1445 moved to the start of the function so the incoming values are
1449 expand_builtin_apply_args (void)
1451 /* Don't do __builtin_apply_args more than once in a function.
1452 Save the result of the first call and reuse it. */
1453 if (apply_args_value != 0)
1454 return apply_args_value;
1456 /* When this function is called, it means that registers must be
1457 saved on entry to this function. So we migrate the
1458 call to the first insn of this function. */
1463 temp = expand_builtin_apply_args_1 ();
1467 apply_args_value = temp;
1469 /* Put the insns after the NOTE that starts the function.
1470 If this is inside a start_sequence, make the outer-level insn
1471 chain current, so the code is placed at the start of the
1472 function. If internal_arg_pointer is a non-virtual pseudo,
1473 it needs to be placed after the function that initializes
1475 push_topmost_sequence ();
1476 if (REG_P (crtl->args.internal_arg_pointer)
1477 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1478 emit_insn_before (seq, parm_birth_insn);
1480 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1481 pop_topmost_sequence ();
1486 /* Perform an untyped call and save the state required to perform an
1487 untyped return of whatever value was returned by the given function. */
1490 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1492 int size, align, regno;
1493 enum machine_mode mode;
1494 rtx incoming_args, result, reg, dest, src, call_insn;
1495 rtx old_stack_level = 0;
1496 rtx call_fusage = 0;
1497 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1499 arguments = convert_memory_address (Pmode, arguments);
1501 /* Create a block where the return registers can be saved. */
1502 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1504 /* Fetch the arg pointer from the ARGUMENTS block. */
1505 incoming_args = gen_reg_rtx (Pmode);
1506 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1507 #ifndef STACK_GROWS_DOWNWARD
1508 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1509 incoming_args, 0, OPTAB_LIB_WIDEN);
1512 /* Push a new argument block and copy the arguments. Do not allow
1513 the (potential) memcpy call below to interfere with our stack
1515 do_pending_stack_adjust ();
1518 /* Save the stack with nonlocal if available. */
1519 #ifdef HAVE_save_stack_nonlocal
1520 if (HAVE_save_stack_nonlocal)
1521 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1524 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1526 /* Allocate a block of memory onto the stack and copy the memory
1527 arguments to the outgoing arguments address. */
1528 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1530 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1531 may have already set current_function_calls_alloca to true.
1532 current_function_calls_alloca won't be set if argsize is zero,
1533 so we have to guarantee need_drap is true here. */
1534 if (SUPPORTS_STACK_ALIGNMENT)
1535 crtl->need_drap = true;
1537 dest = virtual_outgoing_args_rtx;
1538 #ifndef STACK_GROWS_DOWNWARD
1539 if (CONST_INT_P (argsize))
1540 dest = plus_constant (dest, -INTVAL (argsize));
1542 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1544 dest = gen_rtx_MEM (BLKmode, dest);
1545 set_mem_align (dest, PARM_BOUNDARY);
1546 src = gen_rtx_MEM (BLKmode, incoming_args);
1547 set_mem_align (src, PARM_BOUNDARY);
1548 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1550 /* Refer to the argument block. */
1552 arguments = gen_rtx_MEM (BLKmode, arguments);
1553 set_mem_align (arguments, PARM_BOUNDARY);
1555 /* Walk past the arg-pointer and structure value address. */
1556 size = GET_MODE_SIZE (Pmode);
1558 size += GET_MODE_SIZE (Pmode);
1560 /* Restore each of the registers previously saved. Make USE insns
1561 for each of these registers for use in making the call. */
1562 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1563 if ((mode = apply_args_mode[regno]) != VOIDmode)
1565 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1566 if (size % align != 0)
1567 size = CEIL (size, align) * align;
1568 reg = gen_rtx_REG (mode, regno);
1569 emit_move_insn (reg, adjust_address (arguments, mode, size));
1570 use_reg (&call_fusage, reg);
1571 size += GET_MODE_SIZE (mode);
1574 /* Restore the structure value address unless this is passed as an
1575 "invisible" first argument. */
1576 size = GET_MODE_SIZE (Pmode);
1579 rtx value = gen_reg_rtx (Pmode);
1580 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1581 emit_move_insn (struct_value, value);
1582 if (REG_P (struct_value))
1583 use_reg (&call_fusage, struct_value);
1584 size += GET_MODE_SIZE (Pmode);
1587 /* All arguments and registers used for the call are set up by now! */
1588 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1590 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1591 and we don't want to load it into a register as an optimization,
1592 because prepare_call_address already did it if it should be done. */
1593 if (GET_CODE (function) != SYMBOL_REF)
1594 function = memory_address (FUNCTION_MODE, function);
1596 /* Generate the actual call instruction and save the return value. */
1597 #ifdef HAVE_untyped_call
1598 if (HAVE_untyped_call)
1599 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1600 result, result_vector (1, result)));
1603 #ifdef HAVE_call_value
1604 if (HAVE_call_value)
1608 /* Locate the unique return register. It is not possible to
1609 express a call that sets more than one return register using
1610 call_value; use untyped_call for that. In fact, untyped_call
1611 only needs to save the return registers in the given block. */
1612 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1613 if ((mode = apply_result_mode[regno]) != VOIDmode)
1615 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1617 valreg = gen_rtx_REG (mode, regno);
1620 emit_call_insn (GEN_CALL_VALUE (valreg,
1621 gen_rtx_MEM (FUNCTION_MODE, function),
1622 const0_rtx, NULL_RTX, const0_rtx));
1624 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1630 /* Find the CALL insn we just emitted, and attach the register usage
1632 call_insn = last_call_insn ();
1633 add_function_usage_to (call_insn, call_fusage);
1635 /* Restore the stack. */
1636 #ifdef HAVE_save_stack_nonlocal
1637 if (HAVE_save_stack_nonlocal)
1638 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1641 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1645 /* Return the address of the result block. */
1646 result = copy_addr_to_reg (XEXP (result, 0));
1647 return convert_memory_address (ptr_mode, result);
1650 /* Perform an untyped return. */
1653 expand_builtin_return (rtx result)
1655 int size, align, regno;
1656 enum machine_mode mode;
1658 rtx call_fusage = 0;
1660 result = convert_memory_address (Pmode, result);
1662 apply_result_size ();
1663 result = gen_rtx_MEM (BLKmode, result);
1665 #ifdef HAVE_untyped_return
1666 if (HAVE_untyped_return)
1668 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1674 /* Restore the return value and note that each value is used. */
1676 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1677 if ((mode = apply_result_mode[regno]) != VOIDmode)
1679 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1680 if (size % align != 0)
1681 size = CEIL (size, align) * align;
1682 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1683 emit_move_insn (reg, adjust_address (result, mode, size));
1685 push_to_sequence (call_fusage);
1687 call_fusage = get_insns ();
1689 size += GET_MODE_SIZE (mode);
1692 /* Put the USE insns before the return. */
1693 emit_insn (call_fusage);
1695 /* Return whatever values was restored by jumping directly to the end
1697 expand_naked_return ();
1700 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1702 static enum type_class
1703 type_to_class (tree type)
1705 switch (TREE_CODE (type))
1707 case VOID_TYPE: return void_type_class;
1708 case INTEGER_TYPE: return integer_type_class;
1709 case ENUMERAL_TYPE: return enumeral_type_class;
1710 case BOOLEAN_TYPE: return boolean_type_class;
1711 case POINTER_TYPE: return pointer_type_class;
1712 case REFERENCE_TYPE: return reference_type_class;
1713 case OFFSET_TYPE: return offset_type_class;
1714 case REAL_TYPE: return real_type_class;
1715 case COMPLEX_TYPE: return complex_type_class;
1716 case FUNCTION_TYPE: return function_type_class;
1717 case METHOD_TYPE: return method_type_class;
1718 case RECORD_TYPE: return record_type_class;
1720 case QUAL_UNION_TYPE: return union_type_class;
1721 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1722 ? string_type_class : array_type_class);
1723 case LANG_TYPE: return lang_type_class;
1724 default: return no_type_class;
1728 /* Expand a call EXP to __builtin_classify_type. */
1731 expand_builtin_classify_type (tree exp)
1733 if (call_expr_nargs (exp))
1734 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1735 return GEN_INT (no_type_class);
1738 /* This helper macro, meant to be used in mathfn_built_in below,
1739 determines which among a set of three builtin math functions is
1740 appropriate for a given type mode. The `F' and `L' cases are
1741 automatically generated from the `double' case. */
1742 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1743 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1744 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1745 fcodel = BUILT_IN_MATHFN##L ; break;
1746 /* Similar to above, but appends _R after any F/L suffix. */
1747 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1748 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1749 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1750 fcodel = BUILT_IN_MATHFN##L_R ; break;
1752 /* Return mathematic function equivalent to FN but operating directly
1753 on TYPE, if available. If IMPLICIT is true find the function in
1754 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1755 can't do the conversion, return zero. */
1758 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1760 tree const *const fn_arr
1761 = implicit ? implicit_built_in_decls : built_in_decls;
1762 enum built_in_function fcode, fcodef, fcodel;
1766 CASE_MATHFN (BUILT_IN_ACOS)
1767 CASE_MATHFN (BUILT_IN_ACOSH)
1768 CASE_MATHFN (BUILT_IN_ASIN)
1769 CASE_MATHFN (BUILT_IN_ASINH)
1770 CASE_MATHFN (BUILT_IN_ATAN)
1771 CASE_MATHFN (BUILT_IN_ATAN2)
1772 CASE_MATHFN (BUILT_IN_ATANH)
1773 CASE_MATHFN (BUILT_IN_CBRT)
1774 CASE_MATHFN (BUILT_IN_CEIL)
1775 CASE_MATHFN (BUILT_IN_CEXPI)
1776 CASE_MATHFN (BUILT_IN_COPYSIGN)
1777 CASE_MATHFN (BUILT_IN_COS)
1778 CASE_MATHFN (BUILT_IN_COSH)
1779 CASE_MATHFN (BUILT_IN_DREM)
1780 CASE_MATHFN (BUILT_IN_ERF)
1781 CASE_MATHFN (BUILT_IN_ERFC)
1782 CASE_MATHFN (BUILT_IN_EXP)
1783 CASE_MATHFN (BUILT_IN_EXP10)
1784 CASE_MATHFN (BUILT_IN_EXP2)
1785 CASE_MATHFN (BUILT_IN_EXPM1)
1786 CASE_MATHFN (BUILT_IN_FABS)
1787 CASE_MATHFN (BUILT_IN_FDIM)
1788 CASE_MATHFN (BUILT_IN_FLOOR)
1789 CASE_MATHFN (BUILT_IN_FMA)
1790 CASE_MATHFN (BUILT_IN_FMAX)
1791 CASE_MATHFN (BUILT_IN_FMIN)
1792 CASE_MATHFN (BUILT_IN_FMOD)
1793 CASE_MATHFN (BUILT_IN_FREXP)
1794 CASE_MATHFN (BUILT_IN_GAMMA)
1795 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1796 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1797 CASE_MATHFN (BUILT_IN_HYPOT)
1798 CASE_MATHFN (BUILT_IN_ILOGB)
1799 CASE_MATHFN (BUILT_IN_INF)
1800 CASE_MATHFN (BUILT_IN_ISINF)
1801 CASE_MATHFN (BUILT_IN_J0)
1802 CASE_MATHFN (BUILT_IN_J1)
1803 CASE_MATHFN (BUILT_IN_JN)
1804 CASE_MATHFN (BUILT_IN_LCEIL)
1805 CASE_MATHFN (BUILT_IN_LDEXP)
1806 CASE_MATHFN (BUILT_IN_LFLOOR)
1807 CASE_MATHFN (BUILT_IN_LGAMMA)
1808 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1809 CASE_MATHFN (BUILT_IN_LLCEIL)
1810 CASE_MATHFN (BUILT_IN_LLFLOOR)
1811 CASE_MATHFN (BUILT_IN_LLRINT)
1812 CASE_MATHFN (BUILT_IN_LLROUND)
1813 CASE_MATHFN (BUILT_IN_LOG)
1814 CASE_MATHFN (BUILT_IN_LOG10)
1815 CASE_MATHFN (BUILT_IN_LOG1P)
1816 CASE_MATHFN (BUILT_IN_LOG2)
1817 CASE_MATHFN (BUILT_IN_LOGB)
1818 CASE_MATHFN (BUILT_IN_LRINT)
1819 CASE_MATHFN (BUILT_IN_LROUND)
1820 CASE_MATHFN (BUILT_IN_MODF)
1821 CASE_MATHFN (BUILT_IN_NAN)
1822 CASE_MATHFN (BUILT_IN_NANS)
1823 CASE_MATHFN (BUILT_IN_NEARBYINT)
1824 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1825 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1826 CASE_MATHFN (BUILT_IN_POW)
1827 CASE_MATHFN (BUILT_IN_POWI)
1828 CASE_MATHFN (BUILT_IN_POW10)
1829 CASE_MATHFN (BUILT_IN_REMAINDER)
1830 CASE_MATHFN (BUILT_IN_REMQUO)
1831 CASE_MATHFN (BUILT_IN_RINT)
1832 CASE_MATHFN (BUILT_IN_ROUND)
1833 CASE_MATHFN (BUILT_IN_SCALB)
1834 CASE_MATHFN (BUILT_IN_SCALBLN)
1835 CASE_MATHFN (BUILT_IN_SCALBN)
1836 CASE_MATHFN (BUILT_IN_SIGNBIT)
1837 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1838 CASE_MATHFN (BUILT_IN_SIN)
1839 CASE_MATHFN (BUILT_IN_SINCOS)
1840 CASE_MATHFN (BUILT_IN_SINH)
1841 CASE_MATHFN (BUILT_IN_SQRT)
1842 CASE_MATHFN (BUILT_IN_TAN)
1843 CASE_MATHFN (BUILT_IN_TANH)
1844 CASE_MATHFN (BUILT_IN_TGAMMA)
1845 CASE_MATHFN (BUILT_IN_TRUNC)
1846 CASE_MATHFN (BUILT_IN_Y0)
1847 CASE_MATHFN (BUILT_IN_Y1)
1848 CASE_MATHFN (BUILT_IN_YN)
1854 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1855 return fn_arr[fcode];
1856 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1857 return fn_arr[fcodef];
1858 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1859 return fn_arr[fcodel];
1864 /* Like mathfn_built_in_1(), but always use the implicit array. */
1867 mathfn_built_in (tree type, enum built_in_function fn)
1869 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1872 /* If errno must be maintained, expand the RTL to check if the result,
1873 TARGET, of a built-in function call, EXP, is NaN, and if so set
1877 expand_errno_check (tree exp, rtx target)
1879 rtx lab = gen_label_rtx ();
1881 /* Test the result; if it is NaN, set errno=EDOM because
1882 the argument was not in the domain. */
1883 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1884 NULL_RTX, NULL_RTX, lab,
1885 /* The jump is very likely. */
1886 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1889 /* If this built-in doesn't throw an exception, set errno directly. */
1890 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1892 #ifdef GEN_ERRNO_RTX
1893 rtx errno_rtx = GEN_ERRNO_RTX;
1896 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1898 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1904 /* Make sure the library call isn't expanded as a tail call. */
1905 CALL_EXPR_TAILCALL (exp) = 0;
1907 /* We can't set errno=EDOM directly; let the library call do it.
1908 Pop the arguments right away in case the call gets deleted. */
1910 expand_call (exp, target, 0);
1915 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1916 Return NULL_RTX if a normal call should be emitted rather than expanding
1917 the function in-line. EXP is the expression that is a call to the builtin
1918 function; if convenient, the result should be placed in TARGET.
1919 SUBTARGET may be used as the target for computing one of EXP's operands. */
1922 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1924 optab builtin_optab;
1926 tree fndecl = get_callee_fndecl (exp);
1927 enum machine_mode mode;
1928 bool errno_set = false;
1931 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1934 arg = CALL_EXPR_ARG (exp, 0);
1936 switch (DECL_FUNCTION_CODE (fndecl))
1938 CASE_FLT_FN (BUILT_IN_SQRT):
1939 errno_set = ! tree_expr_nonnegative_p (arg);
1940 builtin_optab = sqrt_optab;
1942 CASE_FLT_FN (BUILT_IN_EXP):
1943 errno_set = true; builtin_optab = exp_optab; break;
1944 CASE_FLT_FN (BUILT_IN_EXP10):
1945 CASE_FLT_FN (BUILT_IN_POW10):
1946 errno_set = true; builtin_optab = exp10_optab; break;
1947 CASE_FLT_FN (BUILT_IN_EXP2):
1948 errno_set = true; builtin_optab = exp2_optab; break;
1949 CASE_FLT_FN (BUILT_IN_EXPM1):
1950 errno_set = true; builtin_optab = expm1_optab; break;
1951 CASE_FLT_FN (BUILT_IN_LOGB):
1952 errno_set = true; builtin_optab = logb_optab; break;
1953 CASE_FLT_FN (BUILT_IN_LOG):
1954 errno_set = true; builtin_optab = log_optab; break;
1955 CASE_FLT_FN (BUILT_IN_LOG10):
1956 errno_set = true; builtin_optab = log10_optab; break;
1957 CASE_FLT_FN (BUILT_IN_LOG2):
1958 errno_set = true; builtin_optab = log2_optab; break;
1959 CASE_FLT_FN (BUILT_IN_LOG1P):
1960 errno_set = true; builtin_optab = log1p_optab; break;
1961 CASE_FLT_FN (BUILT_IN_ASIN):
1962 builtin_optab = asin_optab; break;
1963 CASE_FLT_FN (BUILT_IN_ACOS):
1964 builtin_optab = acos_optab; break;
1965 CASE_FLT_FN (BUILT_IN_TAN):
1966 builtin_optab = tan_optab; break;
1967 CASE_FLT_FN (BUILT_IN_ATAN):
1968 builtin_optab = atan_optab; break;
1969 CASE_FLT_FN (BUILT_IN_FLOOR):
1970 builtin_optab = floor_optab; break;
1971 CASE_FLT_FN (BUILT_IN_CEIL):
1972 builtin_optab = ceil_optab; break;
1973 CASE_FLT_FN (BUILT_IN_TRUNC):
1974 builtin_optab = btrunc_optab; break;
1975 CASE_FLT_FN (BUILT_IN_ROUND):
1976 builtin_optab = round_optab; break;
1977 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1978 builtin_optab = nearbyint_optab;
1979 if (flag_trapping_math)
1981 /* Else fallthrough and expand as rint. */
1982 CASE_FLT_FN (BUILT_IN_RINT):
1983 builtin_optab = rint_optab; break;
1984 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1985 builtin_optab = significand_optab; break;
1990 /* Make a suitable register to place result in. */
1991 mode = TYPE_MODE (TREE_TYPE (exp));
1993 if (! flag_errno_math || ! HONOR_NANS (mode))
1996 /* Before working hard, check whether the instruction is available. */
1997 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1999 target = gen_reg_rtx (mode);
2001 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2002 need to expand the argument again. This way, we will not perform
2003 side-effects more the once. */
2004 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2006 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2010 /* Compute into TARGET.
2011 Set TARGET to wherever the result comes back. */
2012 target = expand_unop (mode, builtin_optab, op0, target, 0);
2017 expand_errno_check (exp, target);
2019 /* Output the entire sequence. */
2020 insns = get_insns ();
2026 /* If we were unable to expand via the builtin, stop the sequence
2027 (without outputting the insns) and call to the library function
2028 with the stabilized argument list. */
2032 return expand_call (exp, target, target == const0_rtx);
2035 /* Expand a call to the builtin binary math functions (pow and atan2).
2036 Return NULL_RTX if a normal call should be emitted rather than expanding the
2037 function in-line. EXP is the expression that is a call to the builtin
2038 function; if convenient, the result should be placed in TARGET.
2039 SUBTARGET may be used as the target for computing one of EXP's
2043 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2045 optab builtin_optab;
2046 rtx op0, op1, insns;
2047 int op1_type = REAL_TYPE;
2048 tree fndecl = get_callee_fndecl (exp);
2050 enum machine_mode mode;
2051 bool errno_set = true;
2053 switch (DECL_FUNCTION_CODE (fndecl))
2055 CASE_FLT_FN (BUILT_IN_SCALBN):
2056 CASE_FLT_FN (BUILT_IN_SCALBLN):
2057 CASE_FLT_FN (BUILT_IN_LDEXP):
2058 op1_type = INTEGER_TYPE;
2063 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2066 arg0 = CALL_EXPR_ARG (exp, 0);
2067 arg1 = CALL_EXPR_ARG (exp, 1);
2069 switch (DECL_FUNCTION_CODE (fndecl))
2071 CASE_FLT_FN (BUILT_IN_POW):
2072 builtin_optab = pow_optab; break;
2073 CASE_FLT_FN (BUILT_IN_ATAN2):
2074 builtin_optab = atan2_optab; break;
2075 CASE_FLT_FN (BUILT_IN_SCALB):
2076 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2078 builtin_optab = scalb_optab; break;
2079 CASE_FLT_FN (BUILT_IN_SCALBN):
2080 CASE_FLT_FN (BUILT_IN_SCALBLN):
2081 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2083 /* Fall through... */
2084 CASE_FLT_FN (BUILT_IN_LDEXP):
2085 builtin_optab = ldexp_optab; break;
2086 CASE_FLT_FN (BUILT_IN_FMOD):
2087 builtin_optab = fmod_optab; break;
2088 CASE_FLT_FN (BUILT_IN_REMAINDER):
2089 CASE_FLT_FN (BUILT_IN_DREM):
2090 builtin_optab = remainder_optab; break;
2095 /* Make a suitable register to place result in. */
2096 mode = TYPE_MODE (TREE_TYPE (exp));
2098 /* Before working hard, check whether the instruction is available. */
2099 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2102 target = gen_reg_rtx (mode);
2104 if (! flag_errno_math || ! HONOR_NANS (mode))
2107 /* Always stabilize the argument list. */
2108 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2109 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2111 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2112 op1 = expand_normal (arg1);
2116 /* Compute into TARGET.
2117 Set TARGET to wherever the result comes back. */
2118 target = expand_binop (mode, builtin_optab, op0, op1,
2119 target, 0, OPTAB_DIRECT);
2121 /* If we were unable to expand via the builtin, stop the sequence
2122 (without outputting the insns) and call to the library function
2123 with the stabilized argument list. */
2127 return expand_call (exp, target, target == const0_rtx);
2131 expand_errno_check (exp, target);
2133 /* Output the entire sequence. */
2134 insns = get_insns ();
2141 /* Expand a call to the builtin sin and cos math functions.
2142 Return NULL_RTX if a normal call should be emitted rather than expanding the
2143 function in-line. EXP is the expression that is a call to the builtin
2144 function; if convenient, the result should be placed in TARGET.
2145 SUBTARGET may be used as the target for computing one of EXP's
2149 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2151 optab builtin_optab;
2153 tree fndecl = get_callee_fndecl (exp);
2154 enum machine_mode mode;
2157 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2160 arg = CALL_EXPR_ARG (exp, 0);
2162 switch (DECL_FUNCTION_CODE (fndecl))
2164 CASE_FLT_FN (BUILT_IN_SIN):
2165 CASE_FLT_FN (BUILT_IN_COS):
2166 builtin_optab = sincos_optab; break;
2171 /* Make a suitable register to place result in. */
2172 mode = TYPE_MODE (TREE_TYPE (exp));
2174 /* Check if sincos insn is available, otherwise fallback
2175 to sin or cos insn. */
2176 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2177 switch (DECL_FUNCTION_CODE (fndecl))
2179 CASE_FLT_FN (BUILT_IN_SIN):
2180 builtin_optab = sin_optab; break;
2181 CASE_FLT_FN (BUILT_IN_COS):
2182 builtin_optab = cos_optab; break;
2187 /* Before working hard, check whether the instruction is available. */
2188 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2190 target = gen_reg_rtx (mode);
2192 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2193 need to expand the argument again. This way, we will not perform
2194 side-effects more the once. */
2195 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2197 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2201 /* Compute into TARGET.
2202 Set TARGET to wherever the result comes back. */
2203 if (builtin_optab == sincos_optab)
2207 switch (DECL_FUNCTION_CODE (fndecl))
2209 CASE_FLT_FN (BUILT_IN_SIN):
2210 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2212 CASE_FLT_FN (BUILT_IN_COS):
2213 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2218 gcc_assert (result);
2222 target = expand_unop (mode, builtin_optab, op0, target, 0);
2227 /* Output the entire sequence. */
2228 insns = get_insns ();
2234 /* If we were unable to expand via the builtin, stop the sequence
2235 (without outputting the insns) and call to the library function
2236 with the stabilized argument list. */
2240 target = expand_call (exp, target, target == const0_rtx);
2245 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2246 return an RTL instruction code that implements the functionality.
2247 If that isn't possible or available return CODE_FOR_nothing. */
2249 static enum insn_code
2250 interclass_mathfn_icode (tree arg, tree fndecl)
2252 bool errno_set = false;
2253 optab builtin_optab = 0;
2254 enum machine_mode mode;
2256 switch (DECL_FUNCTION_CODE (fndecl))
2258 CASE_FLT_FN (BUILT_IN_ILOGB):
2259 errno_set = true; builtin_optab = ilogb_optab; break;
2260 CASE_FLT_FN (BUILT_IN_ISINF):
2261 builtin_optab = isinf_optab; break;
2262 case BUILT_IN_ISNORMAL:
2263 case BUILT_IN_ISFINITE:
2264 CASE_FLT_FN (BUILT_IN_FINITE):
2265 case BUILT_IN_FINITED32:
2266 case BUILT_IN_FINITED64:
2267 case BUILT_IN_FINITED128:
2268 case BUILT_IN_ISINFD32:
2269 case BUILT_IN_ISINFD64:
2270 case BUILT_IN_ISINFD128:
2271 /* These builtins have no optabs (yet). */
2277 /* There's no easy way to detect the case we need to set EDOM. */
2278 if (flag_errno_math && errno_set)
2279 return CODE_FOR_nothing;
2281 /* Optab mode depends on the mode of the input argument. */
2282 mode = TYPE_MODE (TREE_TYPE (arg));
2285 return optab_handler (builtin_optab, mode)->insn_code;
2286 return CODE_FOR_nothing;
2289 /* Expand a call to one of the builtin math functions that operate on
2290 floating point argument and output an integer result (ilogb, isinf,
2292 Return 0 if a normal call should be emitted rather than expanding the
2293 function in-line. EXP is the expression that is a call to the builtin
2294 function; if convenient, the result should be placed in TARGET.
2295 SUBTARGET may be used as the target for computing one of EXP's operands. */
2298 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2300 enum insn_code icode = CODE_FOR_nothing;
2302 tree fndecl = get_callee_fndecl (exp);
2303 enum machine_mode mode;
2306 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2309 arg = CALL_EXPR_ARG (exp, 0);
2310 icode = interclass_mathfn_icode (arg, fndecl);
2311 mode = TYPE_MODE (TREE_TYPE (arg));
2313 if (icode != CODE_FOR_nothing)
2315 rtx last = get_last_insn ();
2316 tree orig_arg = arg;
2317 /* Make a suitable register to place result in. */
2319 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
2320 || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
2321 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2323 gcc_assert (insn_data[icode].operand[0].predicate
2324 (target, GET_MODE (target)));
2326 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2327 need to expand the argument again. This way, we will not perform
2328 side-effects more the once. */
2329 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2331 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2333 if (mode != GET_MODE (op0))
2334 op0 = convert_to_mode (mode, op0, 0);
2336 /* Compute into TARGET.
2337 Set TARGET to wherever the result comes back. */
2338 if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
2340 delete_insns_since (last);
2341 CALL_EXPR_ARG (exp, 0) = orig_arg;
2347 /* Expand a call to the builtin sincos math function.
2348 Return NULL_RTX if a normal call should be emitted rather than expanding the
2349 function in-line. EXP is the expression that is a call to the builtin
2353 expand_builtin_sincos (tree exp)
2355 rtx op0, op1, op2, target1, target2;
2356 enum machine_mode mode;
2357 tree arg, sinp, cosp;
2359 location_t loc = EXPR_LOCATION (exp);
2361 if (!validate_arglist (exp, REAL_TYPE,
2362 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2365 arg = CALL_EXPR_ARG (exp, 0);
2366 sinp = CALL_EXPR_ARG (exp, 1);
2367 cosp = CALL_EXPR_ARG (exp, 2);
2369 /* Make a suitable register to place result in. */
2370 mode = TYPE_MODE (TREE_TYPE (arg));
2372 /* Check if sincos insn is available, otherwise emit the call. */
2373 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2376 target1 = gen_reg_rtx (mode);
2377 target2 = gen_reg_rtx (mode);
2379 op0 = expand_normal (arg);
2380 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2381 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2383 /* Compute into target1 and target2.
2384 Set TARGET to wherever the result comes back. */
2385 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2386 gcc_assert (result);
2388 /* Move target1 and target2 to the memory locations indicated
2390 emit_move_insn (op1, target1);
2391 emit_move_insn (op2, target2);
2396 /* Expand a call to the internal cexpi builtin to the sincos math function.
2397 EXP is the expression that is a call to the builtin function; if convenient,
2398 the result should be placed in TARGET. SUBTARGET may be used as the target
2399 for computing one of EXP's operands. */
2402 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2404 tree fndecl = get_callee_fndecl (exp);
2406 enum machine_mode mode;
2408 location_t loc = EXPR_LOCATION (exp);
2410 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2413 arg = CALL_EXPR_ARG (exp, 0);
2414 type = TREE_TYPE (arg);
2415 mode = TYPE_MODE (TREE_TYPE (arg));
2417 /* Try expanding via a sincos optab, fall back to emitting a libcall
2418 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2419 is only generated from sincos, cexp or if we have either of them. */
2420 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2422 op1 = gen_reg_rtx (mode);
2423 op2 = gen_reg_rtx (mode);
2425 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2427 /* Compute into op1 and op2. */
2428 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2430 else if (TARGET_HAS_SINCOS)
2432 tree call, fn = NULL_TREE;
2436 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2437 fn = built_in_decls[BUILT_IN_SINCOSF];
2438 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2439 fn = built_in_decls[BUILT_IN_SINCOS];
2440 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2441 fn = built_in_decls[BUILT_IN_SINCOSL];
2445 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2446 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2447 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2448 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2449 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2450 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2452 /* Make sure not to fold the sincos call again. */
2453 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2454 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2455 call, 3, arg, top1, top2));
2459 tree call, fn = NULL_TREE, narg;
2460 tree ctype = build_complex_type (type);
2462 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2463 fn = built_in_decls[BUILT_IN_CEXPF];
2464 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2465 fn = built_in_decls[BUILT_IN_CEXP];
2466 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2467 fn = built_in_decls[BUILT_IN_CEXPL];
2471 /* If we don't have a decl for cexp create one. This is the
2472 friendliest fallback if the user calls __builtin_cexpi
2473 without full target C99 function support. */
2474 if (fn == NULL_TREE)
2477 const char *name = NULL;
2479 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2481 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2483 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2486 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2487 fn = build_fn_decl (name, fntype);
2490 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2491 build_real (type, dconst0), arg);
2493 /* Make sure not to fold the cexp call again. */
2494 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2495 return expand_expr (build_call_nary (ctype, call, 1, narg),
2496 target, VOIDmode, EXPAND_NORMAL);
2499 /* Now build the proper return type. */
2500 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2501 make_tree (TREE_TYPE (arg), op2),
2502 make_tree (TREE_TYPE (arg), op1)),
2503 target, VOIDmode, EXPAND_NORMAL);
2506 /* Conveniently construct a function call expression. FNDECL names the
2507 function to be called, N is the number of arguments, and the "..."
2508 parameters are the argument expressions. Unlike build_call_exr
2509 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2512 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2515 tree fntype = TREE_TYPE (fndecl);
2516 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2519 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2521 SET_EXPR_LOCATION (fn, loc);
2524 #define build_call_nofold(...) \
2525 build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
2527 /* Expand a call to one of the builtin rounding functions gcc defines
2528 as an extension (lfloor and lceil). As these are gcc extensions we
2529 do not need to worry about setting errno to EDOM.
2530 If expanding via optab fails, lower expression to (int)(floor(x)).
2531 EXP is the expression that is a call to the builtin function;
2532 if convenient, the result should be placed in TARGET. */
2535 expand_builtin_int_roundingfn (tree exp, rtx target)
2537 convert_optab builtin_optab;
2538 rtx op0, insns, tmp;
2539 tree fndecl = get_callee_fndecl (exp);
2540 enum built_in_function fallback_fn;
2541 tree fallback_fndecl;
2542 enum machine_mode mode;
2545 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2548 arg = CALL_EXPR_ARG (exp, 0);
2550 switch (DECL_FUNCTION_CODE (fndecl))
2552 CASE_FLT_FN (BUILT_IN_LCEIL):
2553 CASE_FLT_FN (BUILT_IN_LLCEIL):
2554 builtin_optab = lceil_optab;
2555 fallback_fn = BUILT_IN_CEIL;
2558 CASE_FLT_FN (BUILT_IN_LFLOOR):
2559 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2560 builtin_optab = lfloor_optab;
2561 fallback_fn = BUILT_IN_FLOOR;
2568 /* Make a suitable register to place result in. */
2569 mode = TYPE_MODE (TREE_TYPE (exp));
2571 target = gen_reg_rtx (mode);
2573 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2574 need to expand the argument again. This way, we will not perform
2575 side-effects more the once. */
2576 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2578 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2582 /* Compute into TARGET. */
2583 if (expand_sfix_optab (target, op0, builtin_optab))
2585 /* Output the entire sequence. */
2586 insns = get_insns ();
2592 /* If we were unable to expand via the builtin, stop the sequence
2593 (without outputting the insns). */
2596 /* Fall back to floating point rounding optab. */
2597 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2599 /* For non-C99 targets we may end up without a fallback fndecl here
2600 if the user called __builtin_lfloor directly. In this case emit
2601 a call to the floor/ceil variants nevertheless. This should result
2602 in the best user experience for not full C99 targets. */
2603 if (fallback_fndecl == NULL_TREE)
2606 const char *name = NULL;
2608 switch (DECL_FUNCTION_CODE (fndecl))
2610 case BUILT_IN_LCEIL:
2611 case BUILT_IN_LLCEIL:
2614 case BUILT_IN_LCEILF:
2615 case BUILT_IN_LLCEILF:
2618 case BUILT_IN_LCEILL:
2619 case BUILT_IN_LLCEILL:
2622 case BUILT_IN_LFLOOR:
2623 case BUILT_IN_LLFLOOR:
2626 case BUILT_IN_LFLOORF:
2627 case BUILT_IN_LLFLOORF:
2630 case BUILT_IN_LFLOORL:
2631 case BUILT_IN_LLFLOORL:
2638 fntype = build_function_type_list (TREE_TYPE (arg),
2639 TREE_TYPE (arg), NULL_TREE);
2640 fallback_fndecl = build_fn_decl (name, fntype);
2643 exp = build_call_nofold (fallback_fndecl, 1, arg);
2645 tmp = expand_normal (exp);
2647 /* Truncate the result of floating point optab to integer
2648 via expand_fix (). */
2649 target = gen_reg_rtx (mode);
2650 expand_fix (target, tmp, 0);
2655 /* Expand a call to one of the builtin math functions doing integer
2657 Return 0 if a normal call should be emitted rather than expanding the
2658 function in-line. EXP is the expression that is a call to the builtin
2659 function; if convenient, the result should be placed in TARGET. */
2662 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2664 convert_optab builtin_optab;
2666 tree fndecl = get_callee_fndecl (exp);
2668 enum machine_mode mode;
2670 /* There's no easy way to detect the case we need to set EDOM. */
2671 if (flag_errno_math)
2674 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2677 arg = CALL_EXPR_ARG (exp, 0);
2679 switch (DECL_FUNCTION_CODE (fndecl))
2681 CASE_FLT_FN (BUILT_IN_LRINT):
2682 CASE_FLT_FN (BUILT_IN_LLRINT):
2683 builtin_optab = lrint_optab; break;
2684 CASE_FLT_FN (BUILT_IN_LROUND):
2685 CASE_FLT_FN (BUILT_IN_LLROUND):
2686 builtin_optab = lround_optab; break;
2691 /* Make a suitable register to place result in. */
2692 mode = TYPE_MODE (TREE_TYPE (exp));
2694 target = gen_reg_rtx (mode);
2696 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2697 need to expand the argument again. This way, we will not perform
2698 side-effects more the once. */
2699 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2701 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2705 if (expand_sfix_optab (target, op0, builtin_optab))
2707 /* Output the entire sequence. */
2708 insns = get_insns ();
2714 /* If we were unable to expand via the builtin, stop the sequence
2715 (without outputting the insns) and call to the library function
2716 with the stabilized argument list. */
2719 target = expand_call (exp, target, target == const0_rtx);
2724 /* To evaluate powi(x,n), the floating point value x raised to the
2725 constant integer exponent n, we use a hybrid algorithm that
2726 combines the "window method" with look-up tables. For an
2727 introduction to exponentiation algorithms and "addition chains",
2728 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2729 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2730 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2731 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2733 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2734 multiplications to inline before calling the system library's pow
2735 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2736 so this default never requires calling pow, powf or powl. */
2738 #ifndef POWI_MAX_MULTS
2739 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2742 /* The size of the "optimal power tree" lookup table. All
2743 exponents less than this value are simply looked up in the
2744 powi_table below. This threshold is also used to size the
2745 cache of pseudo registers that hold intermediate results. */
2746 #define POWI_TABLE_SIZE 256
2748 /* The size, in bits of the window, used in the "window method"
2749 exponentiation algorithm. This is equivalent to a radix of
2750 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2751 #define POWI_WINDOW_SIZE 3
2753 /* The following table is an efficient representation of an
2754 "optimal power tree". For each value, i, the corresponding
2755 value, j, in the table states than an optimal evaluation
2756 sequence for calculating pow(x,i) can be found by evaluating
2757 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2758 100 integers is given in Knuth's "Seminumerical algorithms". */
2760 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2762 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2763 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2764 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2765 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2766 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2767 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2768 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2769 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2770 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2771 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2772 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2773 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2774 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2775 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2776 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2777 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2778 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2779 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2780 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2781 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2782 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2783 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2784 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2785 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2786 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2787 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2788 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2789 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2790 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2791 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2792 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2793 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2797 /* Return the number of multiplications required to calculate
2798 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2799 subroutine of powi_cost. CACHE is an array indicating
2800 which exponents have already been calculated. */
2803 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2805 /* If we've already calculated this exponent, then this evaluation
2806 doesn't require any additional multiplications. */
2811 return powi_lookup_cost (n - powi_table[n], cache)
2812 + powi_lookup_cost (powi_table[n], cache) + 1;
2815 /* Return the number of multiplications required to calculate
2816 powi(x,n) for an arbitrary x, given the exponent N. This
2817 function needs to be kept in sync with expand_powi below. */
2820 powi_cost (HOST_WIDE_INT n)
2822 bool cache[POWI_TABLE_SIZE];
2823 unsigned HOST_WIDE_INT digit;
2824 unsigned HOST_WIDE_INT val;
2830 /* Ignore the reciprocal when calculating the cost. */
2831 val = (n < 0) ? -n : n;
2833 /* Initialize the exponent cache. */
2834 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2839 while (val >= POWI_TABLE_SIZE)
2843 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2844 result += powi_lookup_cost (digit, cache)
2845 + POWI_WINDOW_SIZE + 1;
2846 val >>= POWI_WINDOW_SIZE;
2855 return result + powi_lookup_cost (val, cache);
2858 /* Recursive subroutine of expand_powi. This function takes the array,
2859 CACHE, of already calculated exponents and an exponent N and returns
2860 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2863 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2865 unsigned HOST_WIDE_INT digit;
2869 if (n < POWI_TABLE_SIZE)
2874 target = gen_reg_rtx (mode);
2877 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2878 op1 = expand_powi_1 (mode, powi_table[n], cache);
2882 target = gen_reg_rtx (mode);
2883 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2884 op0 = expand_powi_1 (mode, n - digit, cache);
2885 op1 = expand_powi_1 (mode, digit, cache);
2889 target = gen_reg_rtx (mode);
2890 op0 = expand_powi_1 (mode, n >> 1, cache);
2894 result = expand_mult (mode, op0, op1, target, 0);
2895 if (result != target)
2896 emit_move_insn (target, result);
2900 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2901 floating point operand in mode MODE, and N is the exponent. This
2902 function needs to be kept in sync with powi_cost above. */
2905 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2907 rtx cache[POWI_TABLE_SIZE];
2911 return CONST1_RTX (mode);
2913 memset (cache, 0, sizeof (cache));
2916 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2918 /* If the original exponent was negative, reciprocate the result. */
2920 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2921 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2926 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
2927 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
2928 if we can simplify it. */
2930 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
2933 if (TREE_CODE (arg1) == REAL_CST
2934 && !TREE_OVERFLOW (arg1)
2935 && flag_unsafe_math_optimizations)
2937 enum machine_mode mode = TYPE_MODE (type);
2938 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
2939 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
2940 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
2941 tree op = NULL_TREE;
2945 /* Optimize pow (x, 0.5) into sqrt. */
2946 if (REAL_VALUES_EQUAL (c, dconsthalf))
2947 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
2951 REAL_VALUE_TYPE dconst1_4 = dconst1;
2952 REAL_VALUE_TYPE dconst3_4;
2953 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
2955 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
2956 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
2958 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
2959 machines that a builtin sqrt instruction is smaller than a
2960 call to pow with 0.25, so do this optimization even if
2962 if (REAL_VALUES_EQUAL (c, dconst1_4))
2964 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
2965 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
2968 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
2969 are optimizing for space. */
2970 else if (optimize_insn_for_speed_p ()
2971 && !TREE_SIDE_EFFECTS (arg0)
2972 && REAL_VALUES_EQUAL (c, dconst3_4))
2974 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
2975 tree sqrt2 = builtin_save_expr (sqrt1);
2976 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
2977 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
2982 /* Check whether we can do cbrt insstead of pow (x, 1./3.) and
2983 cbrt/sqrts instead of pow (x, 1./6.). */
2985 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
2987 /* First try 1/3. */
2988 REAL_VALUE_TYPE dconst1_3
2989 = real_value_truncate (mode, dconst_third ());
2991 if (REAL_VALUES_EQUAL (c, dconst1_3))
2992 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
2995 else if (optimize_insn_for_speed_p ())
2997 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
2998 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
3000 if (REAL_VALUES_EQUAL (c, dconst1_6))
3002 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3003 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3009 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
3015 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
3016 a normal call should be emitted rather than expanding the function
3017 in-line. EXP is the expression that is a call to the builtin
3018 function; if convenient, the result should be placed in TARGET. */
3021 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
3025 tree type = TREE_TYPE (exp);
3026 REAL_VALUE_TYPE cint, c, c2;
3029 enum machine_mode mode = TYPE_MODE (type);
3031 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
3034 arg0 = CALL_EXPR_ARG (exp, 0);
3035 arg1 = CALL_EXPR_ARG (exp, 1);
3037 if (TREE_CODE (arg1) != REAL_CST
3038 || TREE_OVERFLOW (arg1))
3039 return expand_builtin_mathfn_2 (exp, target, subtarget);
3041 /* Handle constant exponents. */
3043 /* For integer valued exponents we can expand to an optimal multiplication
3044 sequence using expand_powi. */
3045 c = TREE_REAL_CST (arg1);
3046 n = real_to_integer (&c);
3047 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3048 if (real_identical (&c, &cint)
3049 && ((n >= -1 && n <= 2)
3050 || (flag_unsafe_math_optimizations
3051 && optimize_insn_for_speed_p ()
3052 && powi_cost (n) <= POWI_MAX_MULTS)))
3054 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3057 op = force_reg (mode, op);
3058 op = expand_powi (op, mode, n);
3063 narg0 = builtin_save_expr (arg0);
3065 /* If the exponent is not integer valued, check if it is half of an integer.
3066 In this case we can expand to sqrt (x) * x**(n/2). */
3067 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3068 if (fn != NULL_TREE)
3070 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3071 n = real_to_integer (&c2);
3072 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3073 if (real_identical (&c2, &cint)
3074 && ((flag_unsafe_math_optimizations
3075 && optimize_insn_for_speed_p ()
3076 && powi_cost (n/2) <= POWI_MAX_MULTS)
3077 /* Even the c==0.5 case cannot be done unconditionally
3078 when we need to preserve signed zeros, as
3079 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
3080 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)))
3082 tree call_expr = build_call_nofold (fn, 1, narg0);
3083 /* Use expand_expr in case the newly built call expression
3084 was folded to a non-call. */
3085 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3088 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3089 op2 = force_reg (mode, op2);
3090 op2 = expand_powi (op2, mode, abs (n / 2));
3091 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3092 0, OPTAB_LIB_WIDEN);
3093 /* If the original exponent was negative, reciprocate the
3096 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3097 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3103 /* Check whether we can do a series of sqrt or cbrt's instead of the pow
3105 op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
3110 /* Try if the exponent is a third of an integer. In this case
3111 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3112 different from pow (x, 1./3.) due to rounding and behavior
3113 with negative x we need to constrain this transformation to
3114 unsafe math and positive x or finite math. */
3115 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3117 && flag_unsafe_math_optimizations
3118 && (tree_expr_nonnegative_p (arg0)
3119 || !HONOR_NANS (mode)))
3121 REAL_VALUE_TYPE dconst3;
3122 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3123 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3124 real_round (&c2, mode, &c2);
3125 n = real_to_integer (&c2);
3126 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3127 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3128 real_convert (&c2, mode, &c2);
3129 if (real_identical (&c2, &c)
3130 && ((optimize_insn_for_speed_p ()
3131 && powi_cost (n/3) <= POWI_MAX_MULTS)
3134 tree call_expr = build_call_nofold (fn, 1,narg0);
3135 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3136 if (abs (n) % 3 == 2)
3137 op = expand_simple_binop (mode, MULT, op, op, op,
3138 0, OPTAB_LIB_WIDEN);
3141 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3142 op2 = force_reg (mode, op2);
3143 op2 = expand_powi (op2, mode, abs (n / 3));
3144 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3145 0, OPTAB_LIB_WIDEN);
3146 /* If the original exponent was negative, reciprocate the
3149 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3150 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3156 /* Fall back to optab expansion. */
3157 return expand_builtin_mathfn_2 (exp, target, subtarget);
3160 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3161 a normal call should be emitted rather than expanding the function
3162 in-line. EXP is the expression that is a call to the builtin
3163 function; if convenient, the result should be placed in TARGET. */
3166 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3170 enum machine_mode mode;
3171 enum machine_mode mode2;
3173 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3176 arg0 = CALL_EXPR_ARG (exp, 0);
3177 arg1 = CALL_EXPR_ARG (exp, 1);
3178 mode = TYPE_MODE (TREE_TYPE (exp));
3180 /* Handle constant power. */
3182 if (TREE_CODE (arg1) == INTEGER_CST
3183 && !TREE_OVERFLOW (arg1))
3185 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3187 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3188 Otherwise, check the number of multiplications required. */
3189 if ((TREE_INT_CST_HIGH (arg1) == 0
3190 || TREE_INT_CST_HIGH (arg1) == -1)
3191 && ((n >= -1 && n <= 2)
3192 || (optimize_insn_for_speed_p ()
3193 && powi_cost (n) <= POWI_MAX_MULTS)))
3195 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3196 op0 = force_reg (mode, op0);
3197 return expand_powi (op0, mode, n);
3201 /* Emit a libcall to libgcc. */
3203 /* Mode of the 2nd argument must match that of an int. */
3204 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3206 if (target == NULL_RTX)
3207 target = gen_reg_rtx (mode);
3209 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3210 if (GET_MODE (op0) != mode)
3211 op0 = convert_to_mode (mode, op0, 0);
3212 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3213 if (GET_MODE (op1) != mode2)
3214 op1 = convert_to_mode (mode2, op1, 0);
3216 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3217 target, LCT_CONST, mode, 2,
3218 op0, mode, op1, mode2);
3223 /* Expand expression EXP which is a call to the strlen builtin. Return
3224 NULL_RTX if we failed the caller should emit a normal call, otherwise
3225 try to get the result in TARGET, if convenient. */
3228 expand_builtin_strlen (tree exp, rtx target,
3229 enum machine_mode target_mode)
3231 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3237 tree src = CALL_EXPR_ARG (exp, 0);
3238 rtx result, src_reg, char_rtx, before_strlen;
3239 enum machine_mode insn_mode = target_mode, char_mode;
3240 enum insn_code icode = CODE_FOR_nothing;
3243 /* If the length can be computed at compile-time, return it. */
3244 len = c_strlen (src, 0);
3246 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3248 /* If the length can be computed at compile-time and is constant
3249 integer, but there are side-effects in src, evaluate
3250 src for side-effects, then return len.
3251 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3252 can be optimized into: i++; x = 3; */
3253 len = c_strlen (src, 1);
3254 if (len && TREE_CODE (len) == INTEGER_CST)
3256 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3257 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3260 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3262 /* If SRC is not a pointer type, don't do this operation inline. */
3266 /* Bail out if we can't compute strlen in the right mode. */
3267 while (insn_mode != VOIDmode)
3269 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3270 if (icode != CODE_FOR_nothing)
3273 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3275 if (insn_mode == VOIDmode)
3278 /* Make a place to write the result of the instruction. */
3282 && GET_MODE (result) == insn_mode
3283 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3284 result = gen_reg_rtx (insn_mode);
3286 /* Make a place to hold the source address. We will not expand
3287 the actual source until we are sure that the expansion will
3288 not fail -- there are trees that cannot be expanded twice. */
3289 src_reg = gen_reg_rtx (Pmode);
3291 /* Mark the beginning of the strlen sequence so we can emit the
3292 source operand later. */
3293 before_strlen = get_last_insn ();
3295 char_rtx = const0_rtx;
3296 char_mode = insn_data[(int) icode].operand[2].mode;
3297 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3299 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3301 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3302 char_rtx, GEN_INT (align));
3307 /* Now that we are assured of success, expand the source. */
3309 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3311 emit_move_insn (src_reg, pat);
3316 emit_insn_after (pat, before_strlen);
3318 emit_insn_before (pat, get_insns ());
3320 /* Return the value in the proper mode for this function. */
3321 if (GET_MODE (result) == target_mode)
3323 else if (target != 0)
3324 convert_move (target, result, 0);
3326 target = convert_to_mode (target_mode, result, 0);
3332 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3333 bytes from constant string DATA + OFFSET and return it as target
3337 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3338 enum machine_mode mode)
3340 const char *str = (const char *) data;
3342 gcc_assert (offset >= 0
3343 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3344 <= strlen (str) + 1));
3346 return c_readstr (str + offset, mode);
3349 /* Expand a call EXP to the memcpy builtin.
3350 Return NULL_RTX if we failed, the caller should emit a normal call,
3351 otherwise try to get the result in TARGET, if convenient (and in
3352 mode MODE if that's convenient). */
3355 expand_builtin_memcpy (tree exp, rtx target)
3357 if (!validate_arglist (exp,
3358 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3362 tree dest = CALL_EXPR_ARG (exp, 0);
3363 tree src = CALL_EXPR_ARG (exp, 1);
3364 tree len = CALL_EXPR_ARG (exp, 2);
3365 const char *src_str;
3366 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3367 unsigned int dest_align
3368 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3369 rtx dest_mem, src_mem, dest_addr, len_rtx;
3370 HOST_WIDE_INT expected_size = -1;
3371 unsigned int expected_align = 0;
3373 /* If DEST is not a pointer type, call the normal function. */
3374 if (dest_align == 0)
3377 /* If either SRC is not a pointer type, don't do this
3378 operation in-line. */
3382 if (currently_expanding_gimple_stmt)
3383 stringop_block_profile (currently_expanding_gimple_stmt,
3384 &expected_align, &expected_size);
3386 if (expected_align < dest_align)
3387 expected_align = dest_align;
3388 dest_mem = get_memory_rtx (dest, len);
3389 set_mem_align (dest_mem, dest_align);
3390 len_rtx = expand_normal (len);
3391 src_str = c_getstr (src);
3393 /* If SRC is a string constant and block move would be done
3394 by pieces, we can avoid loading the string from memory
3395 and only stored the computed constants. */
3397 && CONST_INT_P (len_rtx)
3398 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3399 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3400 CONST_CAST (char *, src_str),
3403 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3404 builtin_memcpy_read_str,
3405 CONST_CAST (char *, src_str),
3406 dest_align, false, 0);
3407 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3408 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3412 src_mem = get_memory_rtx (src, len);
3413 set_mem_align (src_mem, src_align);
3415 /* Copy word part most expediently. */
3416 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3417 CALL_EXPR_TAILCALL (exp)
3418 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3419 expected_align, expected_size);
3423 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3424 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3430 /* Expand a call EXP to the mempcpy builtin.
3431 Return NULL_RTX if we failed; the caller should emit a normal call,
3432 otherwise try to get the result in TARGET, if convenient (and in
3433 mode MODE if that's convenient). If ENDP is 0 return the
3434 destination pointer, if ENDP is 1 return the end pointer ala
3435 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3439 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3441 if (!validate_arglist (exp,
3442 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3446 tree dest = CALL_EXPR_ARG (exp, 0);
3447 tree src = CALL_EXPR_ARG (exp, 1);
3448 tree len = CALL_EXPR_ARG (exp, 2);
3449 return expand_builtin_mempcpy_args (dest, src, len,
3450 target, mode, /*endp=*/ 1);
3454 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3455 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3456 so that this can also be called without constructing an actual CALL_EXPR.
3457 The other arguments and return value are the same as for
3458 expand_builtin_mempcpy. */
3461 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3462 rtx target, enum machine_mode mode, int endp)
3464 /* If return value is ignored, transform mempcpy into memcpy. */
3465 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3467 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3468 tree result = build_call_nofold (fn, 3, dest, src, len);
3469 return expand_expr (result, target, mode, EXPAND_NORMAL);
3473 const char *src_str;
3474 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3475 unsigned int dest_align
3476 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3477 rtx dest_mem, src_mem, len_rtx;
3479 /* If either SRC or DEST is not a pointer type, don't do this
3480 operation in-line. */
3481 if (dest_align == 0 || src_align == 0)
3484 /* If LEN is not constant, call the normal function. */
3485 if (! host_integerp (len, 1))
3488 len_rtx = expand_normal (len);
3489 src_str = c_getstr (src);
3491 /* If SRC is a string constant and block move would be done
3492 by pieces, we can avoid loading the string from memory
3493 and only stored the computed constants. */
3495 && CONST_INT_P (len_rtx)
3496 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3497 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3498 CONST_CAST (char *, src_str),
3501 dest_mem = get_memory_rtx (dest, len);
3502 set_mem_align (dest_mem, dest_align);
3503 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3504 builtin_memcpy_read_str,
3505 CONST_CAST (char *, src_str),
3506 dest_align, false, endp);
3507 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3508 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3512 if (CONST_INT_P (len_rtx)
3513 && can_move_by_pieces (INTVAL (len_rtx),
3514 MIN (dest_align, src_align)))
3516 dest_mem = get_memory_rtx (dest, len);
3517 set_mem_align (dest_mem, dest_align);
3518 src_mem = get_memory_rtx (src, len);
3519 set_mem_align (src_mem, src_align);
3520 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3521 MIN (dest_align, src_align), endp);
3522 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3523 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3532 # define HAVE_movstr 0
3533 # define CODE_FOR_movstr CODE_FOR_nothing
3536 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3537 we failed, the caller should emit a normal call, otherwise try to
3538 get the result in TARGET, if convenient. If ENDP is 0 return the
3539 destination pointer, if ENDP is 1 return the end pointer ala
3540 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3544 expand_movstr (tree dest, tree src, rtx target, int endp)
3550 const struct insn_data * data;
3555 dest_mem = get_memory_rtx (dest, NULL);
3556 src_mem = get_memory_rtx (src, NULL);
3559 target = force_reg (Pmode, XEXP (dest_mem, 0));
3560 dest_mem = replace_equiv_address (dest_mem, target);
3561 end = gen_reg_rtx (Pmode);
3565 if (target == 0 || target == const0_rtx)
3567 end = gen_reg_rtx (Pmode);
3575 data = insn_data + CODE_FOR_movstr;
3577 if (data->operand[0].mode != VOIDmode)
3578 end = gen_lowpart (data->operand[0].mode, end);
3580 insn = data->genfun (end, dest_mem, src_mem);
3586 /* movstr is supposed to set end to the address of the NUL
3587 terminator. If the caller requested a mempcpy-like return value,
3589 if (endp == 1 && target != const0_rtx)
3591 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3592 emit_move_insn (target, force_operand (tem, NULL_RTX));
3598 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3599 NULL_RTX if we failed the caller should emit a normal call, otherwise
3600 try to get the result in TARGET, if convenient (and in mode MODE if that's
3604 expand_builtin_strcpy (tree exp, rtx target)
3606 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3608 tree dest = CALL_EXPR_ARG (exp, 0);
3609 tree src = CALL_EXPR_ARG (exp, 1);
3610 return expand_builtin_strcpy_args (dest, src, target);
3615 /* Helper function to do the actual work for expand_builtin_strcpy. The
3616 arguments to the builtin_strcpy call DEST and SRC are broken out
3617 so that this can also be called without constructing an actual CALL_EXPR.
3618 The other arguments and return value are the same as for
3619 expand_builtin_strcpy. */
3622 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3624 return expand_movstr (dest, src, target, /*endp=*/0);
3627 /* Expand a call EXP to the stpcpy builtin.
3628 Return NULL_RTX if we failed the caller should emit a normal call,
3629 otherwise try to get the result in TARGET, if convenient (and in
3630 mode MODE if that's convenient). */
3633 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3636 location_t loc = EXPR_LOCATION (exp);
3638 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3641 dst = CALL_EXPR_ARG (exp, 0);
3642 src = CALL_EXPR_ARG (exp, 1);
3644 /* If return value is ignored, transform stpcpy into strcpy. */
3645 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3647 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3648 tree result = build_call_nofold (fn, 2, dst, src);
3649 return expand_expr (result, target, mode, EXPAND_NORMAL);
3656 /* Ensure we get an actual string whose length can be evaluated at
3657 compile-time, not an expression containing a string. This is
3658 because the latter will potentially produce pessimized code
3659 when used to produce the return value. */
3660 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3661 return expand_movstr (dst, src, target, /*endp=*/2);
3663 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3664 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3665 target, mode, /*endp=*/2);
3670 if (TREE_CODE (len) == INTEGER_CST)
3672 rtx len_rtx = expand_normal (len);
3674 if (CONST_INT_P (len_rtx))
3676 ret = expand_builtin_strcpy_args (dst, src, target);
3682 if (mode != VOIDmode)
3683 target = gen_reg_rtx (mode);
3685 target = gen_reg_rtx (GET_MODE (ret));
3687 if (GET_MODE (target) != GET_MODE (ret))
3688 ret = gen_lowpart (GET_MODE (target), ret);
3690 ret = plus_constant (ret, INTVAL (len_rtx));
3691 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3699 return expand_movstr (dst, src, target, /*endp=*/2);
3703 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3704 bytes from constant string DATA + OFFSET and return it as target
3708 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3709 enum machine_mode mode)
3711 const char *str = (const char *) data;
3713 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3716 return c_readstr (str + offset, mode);
3719 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3720 NULL_RTX if we failed the caller should emit a normal call. */
3723 expand_builtin_strncpy (tree exp, rtx target)
3725 location_t loc = EXPR_LOCATION (exp);
3727 if (validate_arglist (exp,
3728 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3730 tree dest = CALL_EXPR_ARG (exp, 0);
3731 tree src = CALL_EXPR_ARG (exp, 1);
3732 tree len = CALL_EXPR_ARG (exp, 2);
3733 tree slen = c_strlen (src, 1);
3735 /* We must be passed a constant len and src parameter. */
3736 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3739 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3741 /* We're required to pad with trailing zeros if the requested
3742 len is greater than strlen(s2)+1. In that case try to
3743 use store_by_pieces, if it fails, punt. */
3744 if (tree_int_cst_lt (slen, len))
3746 unsigned int dest_align
3747 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3748 const char *p = c_getstr (src);
3751 if (!p || dest_align == 0 || !host_integerp (len, 1)
3752 || !can_store_by_pieces (tree_low_cst (len, 1),
3753 builtin_strncpy_read_str,
3754 CONST_CAST (char *, p),
3758 dest_mem = get_memory_rtx (dest, len);
3759 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3760 builtin_strncpy_read_str,
3761 CONST_CAST (char *, p), dest_align, false, 0);
3762 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3763 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3770 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3771 bytes from constant string DATA + OFFSET and return it as target
3775 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3776 enum machine_mode mode)
3778 const char *c = (const char *) data;
3779 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3781 memset (p, *c, GET_MODE_SIZE (mode));
3783 return c_readstr (p, mode);
3786 /* Callback routine for store_by_pieces. Return the RTL of a register
3787 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3788 char value given in the RTL register data. For example, if mode is
3789 4 bytes wide, return the RTL for 0x01010101*data. */
3792 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3793 enum machine_mode mode)
3799 size = GET_MODE_SIZE (mode);
3803 p = XALLOCAVEC (char, size);
3804 memset (p, 1, size);
3805 coeff = c_readstr (p, mode);
3807 target = convert_to_mode (mode, (rtx) data, 1);
3808 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3809 return force_reg (mode, target);
3812 /* Expand expression EXP, which is a call to the memset builtin. Return
3813 NULL_RTX if we failed the caller should emit a normal call, otherwise
3814 try to get the result in TARGET, if convenient (and in mode MODE if that's
3818 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3820 if (!validate_arglist (exp,
3821 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3825 tree dest = CALL_EXPR_ARG (exp, 0);
3826 tree val = CALL_EXPR_ARG (exp, 1);
3827 tree len = CALL_EXPR_ARG (exp, 2);
3828 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3832 /* Helper function to do the actual work for expand_builtin_memset. The
3833 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3834 so that this can also be called without constructing an actual CALL_EXPR.
3835 The other arguments and return value are the same as for
3836 expand_builtin_memset. */
3839 expand_builtin_memset_args (tree dest, tree val, tree len,
3840 rtx target, enum machine_mode mode, tree orig_exp)
3843 enum built_in_function fcode;
3845 unsigned int dest_align;
3846 rtx dest_mem, dest_addr, len_rtx;
3847 HOST_WIDE_INT expected_size = -1;
3848 unsigned int expected_align = 0;
3850 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3852 /* If DEST is not a pointer type, don't do this operation in-line. */
3853 if (dest_align == 0)
3856 if (currently_expanding_gimple_stmt)
3857 stringop_block_profile (currently_expanding_gimple_stmt,
3858 &expected_align, &expected_size);
3860 if (expected_align < dest_align)
3861 expected_align = dest_align;
3863 /* If the LEN parameter is zero, return DEST. */
3864 if (integer_zerop (len))
3866 /* Evaluate and ignore VAL in case it has side-effects. */
3867 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3868 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3871 /* Stabilize the arguments in case we fail. */
3872 dest = builtin_save_expr (dest);
3873 val = builtin_save_expr (val);
3874 len = builtin_save_expr (len);
3876 len_rtx = expand_normal (len);
3877 dest_mem = get_memory_rtx (dest, len);
3879 if (TREE_CODE (val) != INTEGER_CST)
3883 val_rtx = expand_normal (val);
3884 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3887 /* Assume that we can memset by pieces if we can store
3888 * the coefficients by pieces (in the required modes).
3889 * We can't pass builtin_memset_gen_str as that emits RTL. */
3891 if (host_integerp (len, 1)
3892 && can_store_by_pieces (tree_low_cst (len, 1),
3893 builtin_memset_read_str, &c, dest_align,
3896 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3898 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3899 builtin_memset_gen_str, val_rtx, dest_align,
3902 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3903 dest_align, expected_align,
3907 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3908 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3912 if (target_char_cast (val, &c))
3917 if (host_integerp (len, 1)
3918 && can_store_by_pieces (tree_low_cst (len, 1),
3919 builtin_memset_read_str, &c, dest_align,
3921 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3922 builtin_memset_read_str, &c, dest_align, true, 0);
3923 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3924 dest_align, expected_align,
3928 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3929 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3933 set_mem_align (dest_mem, dest_align);
3934 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3935 CALL_EXPR_TAILCALL (orig_exp)
3936 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3937 expected_align, expected_size);
3941 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3942 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3948 fndecl = get_callee_fndecl (orig_exp);
3949 fcode = DECL_FUNCTION_CODE (fndecl);
3950 if (fcode == BUILT_IN_MEMSET)
3951 fn = build_call_nofold (fndecl, 3, dest, val, len);
3952 else if (fcode == BUILT_IN_BZERO)
3953 fn = build_call_nofold (fndecl, 2, dest, len);
3956 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3957 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3958 return expand_call (fn, target, target == const0_rtx);
3961 /* Expand expression EXP, which is a call to the bzero builtin. Return
3962 NULL_RTX if we failed the caller should emit a normal call. */
3965 expand_builtin_bzero (tree exp)
3968 location_t loc = EXPR_LOCATION (exp);
3970 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3973 dest = CALL_EXPR_ARG (exp, 0);
3974 size = CALL_EXPR_ARG (exp, 1);
3976 /* New argument list transforming bzero(ptr x, int y) to
3977 memset(ptr x, int 0, size_t y). This is done this way
3978 so that if it isn't expanded inline, we fallback to
3979 calling bzero instead of memset. */
3981 return expand_builtin_memset_args (dest, integer_zero_node,
3982 fold_convert_loc (loc, sizetype, size),
3983 const0_rtx, VOIDmode, exp);
3986 /* Expand expression EXP, which is a call to the memcmp built-in function.
3987 Return NULL_RTX if we failed and the
3988 caller should emit a normal call, otherwise try to get the result in
3989 TARGET, if convenient (and in mode MODE, if that's convenient). */
3992 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3993 ATTRIBUTE_UNUSED enum machine_mode mode)
3995 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3997 if (!validate_arglist (exp,
3998 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4001 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4003 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4006 tree arg1 = CALL_EXPR_ARG (exp, 0);
4007 tree arg2 = CALL_EXPR_ARG (exp, 1);
4008 tree len = CALL_EXPR_ARG (exp, 2);
4011 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4013 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4014 enum machine_mode insn_mode;
4016 #ifdef HAVE_cmpmemsi
4018 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4021 #ifdef HAVE_cmpstrnsi
4023 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4028 /* If we don't have POINTER_TYPE, call the function. */
4029 if (arg1_align == 0 || arg2_align == 0)
4032 /* Make a place to write the result of the instruction. */
4035 && REG_P (result) && GET_MODE (result) == insn_mode
4036 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4037 result = gen_reg_rtx (insn_mode);
4039 arg1_rtx = get_memory_rtx (arg1, len);
4040 arg2_rtx = get_memory_rtx (arg2, len);
4041 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4043 /* Set MEM_SIZE as appropriate. */
4044 if (CONST_INT_P (arg3_rtx))
4046 set_mem_size (arg1_rtx, arg3_rtx);
4047 set_mem_size (arg2_rtx, arg3_rtx);
4050 #ifdef HAVE_cmpmemsi
4052 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4053 GEN_INT (MIN (arg1_align, arg2_align)));
4056 #ifdef HAVE_cmpstrnsi
4058 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4059 GEN_INT (MIN (arg1_align, arg2_align)));
4067 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4068 TYPE_MODE (integer_type_node), 3,
4069 XEXP (arg1_rtx, 0), Pmode,
4070 XEXP (arg2_rtx, 0), Pmode,
4071 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4072 TYPE_UNSIGNED (sizetype)),
4073 TYPE_MODE (sizetype));
4075 /* Return the value in the proper mode for this function. */
4076 mode = TYPE_MODE (TREE_TYPE (exp));
4077 if (GET_MODE (result) == mode)
4079 else if (target != 0)
4081 convert_move (target, result, 0);
4085 return convert_to_mode (mode, result, 0);
4092 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4093 if we failed the caller should emit a normal call, otherwise try to get
4094 the result in TARGET, if convenient. */
4097 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4099 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4102 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4103 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4104 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4106 rtx arg1_rtx, arg2_rtx;
4107 rtx result, insn = NULL_RTX;
4109 tree arg1 = CALL_EXPR_ARG (exp, 0);
4110 tree arg2 = CALL_EXPR_ARG (exp, 1);
4113 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4115 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4117 /* If we don't have POINTER_TYPE, call the function. */
4118 if (arg1_align == 0 || arg2_align == 0)
4121 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4122 arg1 = builtin_save_expr (arg1);
4123 arg2 = builtin_save_expr (arg2);
4125 arg1_rtx = get_memory_rtx (arg1, NULL);
4126 arg2_rtx = get_memory_rtx (arg2, NULL);
4128 #ifdef HAVE_cmpstrsi
4129 /* Try to call cmpstrsi. */
4132 enum machine_mode insn_mode
4133 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4135 /* Make a place to write the result of the instruction. */
4138 && REG_P (result) && GET_MODE (result) == insn_mode
4139 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4140 result = gen_reg_rtx (insn_mode);
4142 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4143 GEN_INT (MIN (arg1_align, arg2_align)));
4146 #ifdef HAVE_cmpstrnsi
4147 /* Try to determine at least one length and call cmpstrnsi. */
4148 if (!insn && HAVE_cmpstrnsi)
4153 enum machine_mode insn_mode
4154 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4155 tree len1 = c_strlen (arg1, 1);
4156 tree len2 = c_strlen (arg2, 1);
4159 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4161 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4163 /* If we don't have a constant length for the first, use the length
4164 of the second, if we know it. We don't require a constant for
4165 this case; some cost analysis could be done if both are available
4166 but neither is constant. For now, assume they're equally cheap,
4167 unless one has side effects. If both strings have constant lengths,
4174 else if (TREE_SIDE_EFFECTS (len1))
4176 else if (TREE_SIDE_EFFECTS (len2))
4178 else if (TREE_CODE (len1) != INTEGER_CST)
4180 else if (TREE_CODE (len2) != INTEGER_CST)
4182 else if (tree_int_cst_lt (len1, len2))
4187 /* If both arguments have side effects, we cannot optimize. */
4188 if (!len || TREE_SIDE_EFFECTS (len))
4191 arg3_rtx = expand_normal (len);
4193 /* Make a place to write the result of the instruction. */
4196 && REG_P (result) && GET_MODE (result) == insn_mode
4197 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4198 result = gen_reg_rtx (insn_mode);
4200 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4201 GEN_INT (MIN (arg1_align, arg2_align)));
4207 enum machine_mode mode;
4210 /* Return the value in the proper mode for this function. */
4211 mode = TYPE_MODE (TREE_TYPE (exp));
4212 if (GET_MODE (result) == mode)
4215 return convert_to_mode (mode, result, 0);
4216 convert_move (target, result, 0);
4220 /* Expand the library call ourselves using a stabilized argument
4221 list to avoid re-evaluating the function's arguments twice. */
4222 #ifdef HAVE_cmpstrnsi
4225 fndecl = get_callee_fndecl (exp);
4226 fn = build_call_nofold (fndecl, 2, arg1, arg2);
4227 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4228 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4229 return expand_call (fn, target, target == const0_rtx);
4235 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4236 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4237 the result in TARGET, if convenient. */
4240 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4241 ATTRIBUTE_UNUSED enum machine_mode mode)
4243 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4245 if (!validate_arglist (exp,
4246 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4249 /* If c_strlen can determine an expression for one of the string
4250 lengths, and it doesn't have side effects, then emit cmpstrnsi
4251 using length MIN(strlen(string)+1, arg3). */
4252 #ifdef HAVE_cmpstrnsi
4255 tree len, len1, len2;
4256 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4259 tree arg1 = CALL_EXPR_ARG (exp, 0);
4260 tree arg2 = CALL_EXPR_ARG (exp, 1);
4261 tree arg3 = CALL_EXPR_ARG (exp, 2);
4264 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4266 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4267 enum machine_mode insn_mode
4268 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4270 len1 = c_strlen (arg1, 1);
4271 len2 = c_strlen (arg2, 1);
4274 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4276 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4278 /* If we don't have a constant length for the first, use the length
4279 of the second, if we know it. We don't require a constant for
4280 this case; some cost analysis could be done if both are available
4281 but neither is constant. For now, assume they're equally cheap,
4282 unless one has side effects. If both strings have constant lengths,
4289 else if (TREE_SIDE_EFFECTS (len1))
4291 else if (TREE_SIDE_EFFECTS (len2))
4293 else if (TREE_CODE (len1) != INTEGER_CST)
4295 else if (TREE_CODE (len2) != INTEGER_CST)
4297 else if (tree_int_cst_lt (len1, len2))
4302 /* If both arguments have side effects, we cannot optimize. */
4303 if (!len || TREE_SIDE_EFFECTS (len))
4306 /* The actual new length parameter is MIN(len,arg3). */
4307 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4308 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4310 /* If we don't have POINTER_TYPE, call the function. */
4311 if (arg1_align == 0 || arg2_align == 0)
4314 /* Make a place to write the result of the instruction. */
4317 && REG_P (result) && GET_MODE (result) == insn_mode
4318 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4319 result = gen_reg_rtx (insn_mode);
4321 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4322 arg1 = builtin_save_expr (arg1);
4323 arg2 = builtin_save_expr (arg2);
4324 len = builtin_save_expr (len);
4326 arg1_rtx = get_memory_rtx (arg1, len);
4327 arg2_rtx = get_memory_rtx (arg2, len);
4328 arg3_rtx = expand_normal (len);
4329 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4330 GEN_INT (MIN (arg1_align, arg2_align)));
4335 /* Return the value in the proper mode for this function. */
4336 mode = TYPE_MODE (TREE_TYPE (exp));
4337 if (GET_MODE (result) == mode)
4340 return convert_to_mode (mode, result, 0);
4341 convert_move (target, result, 0);
4345 /* Expand the library call ourselves using a stabilized argument
4346 list to avoid re-evaluating the function's arguments twice. */
4347 fndecl = get_callee_fndecl (exp);
4348 fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
4349 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4350 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4351 return expand_call (fn, target, target == const0_rtx);
4357 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4358 if that's convenient. */
4361 expand_builtin_saveregs (void)
4365 /* Don't do __builtin_saveregs more than once in a function.
4366 Save the result of the first call and reuse it. */
4367 if (saveregs_value != 0)
4368 return saveregs_value;
4370 /* When this function is called, it means that registers must be
4371 saved on entry to this function. So we migrate the call to the
4372 first insn of this function. */
4376 /* Do whatever the machine needs done in this case. */
4377 val = targetm.calls.expand_builtin_saveregs ();
4382 saveregs_value = val;
4384 /* Put the insns after the NOTE that starts the function. If this
4385 is inside a start_sequence, make the outer-level insn chain current, so
4386 the code is placed at the start of the function. */
4387 push_topmost_sequence ();
4388 emit_insn_after (seq, entry_of_function ());
4389 pop_topmost_sequence ();
4394 /* __builtin_args_info (N) returns word N of the arg space info
4395 for the current function. The number and meanings of words
4396 is controlled by the definition of CUMULATIVE_ARGS. */
4399 expand_builtin_args_info (tree exp)
4401 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4402 int *word_ptr = (int *) &crtl->args.info;
4404 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4406 if (call_expr_nargs (exp) != 0)
4408 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4409 error ("argument of %<__builtin_args_info%> must be constant");
4412 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4414 if (wordnum < 0 || wordnum >= nwords)
4415 error ("argument of %<__builtin_args_info%> out of range");
4417 return GEN_INT (word_ptr[wordnum]);
4421 error ("missing argument in %<__builtin_args_info%>");
4426 /* Expand a call to __builtin_next_arg. */
4429 expand_builtin_next_arg (void)
4431 /* Checking arguments is already done in fold_builtin_next_arg
4432 that must be called before this function. */
4433 return expand_binop (ptr_mode, add_optab,
4434 crtl->args.internal_arg_pointer,
4435 crtl->args.arg_offset_rtx,
4436 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4439 /* Make it easier for the backends by protecting the valist argument
4440 from multiple evaluations. */
4443 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4445 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4447 gcc_assert (vatype != NULL_TREE);
4449 if (TREE_CODE (vatype) == ARRAY_TYPE)
4451 if (TREE_SIDE_EFFECTS (valist))
4452 valist = save_expr (valist);
4454 /* For this case, the backends will be expecting a pointer to
4455 vatype, but it's possible we've actually been given an array
4456 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4458 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4460 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4461 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4470 if (! TREE_SIDE_EFFECTS (valist))
4473 pt = build_pointer_type (vatype);
4474 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4475 TREE_SIDE_EFFECTS (valist) = 1;
4478 if (TREE_SIDE_EFFECTS (valist))
4479 valist = save_expr (valist);
4480 valist = build_fold_indirect_ref_loc (loc, valist);
4486 /* The "standard" definition of va_list is void*. */
4489 std_build_builtin_va_list (void)
4491 return ptr_type_node;
4494 /* The "standard" abi va_list is va_list_type_node. */
4497 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4499 return va_list_type_node;
4502 /* The "standard" type of va_list is va_list_type_node. */
4505 std_canonical_va_list_type (tree type)
4509 if (INDIRECT_REF_P (type))
4510 type = TREE_TYPE (type);
4511 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4512 type = TREE_TYPE (type);
4513 wtype = va_list_type_node;
4515 /* Treat structure va_list types. */
4516 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4517 htype = TREE_TYPE (htype);
4518 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4520 /* If va_list is an array type, the argument may have decayed
4521 to a pointer type, e.g. by being passed to another function.
4522 In that case, unwrap both types so that we can compare the
4523 underlying records. */
4524 if (TREE_CODE (htype) == ARRAY_TYPE
4525 || POINTER_TYPE_P (htype))
4527 wtype = TREE_TYPE (wtype);
4528 htype = TREE_TYPE (htype);
4531 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4532 return va_list_type_node;
4537 /* The "standard" implementation of va_start: just assign `nextarg' to
4541 std_expand_builtin_va_start (tree valist, rtx nextarg)
4543 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4544 convert_move (va_r, nextarg, 0);
4547 /* Expand EXP, a call to __builtin_va_start. */
4550 expand_builtin_va_start (tree exp)
4554 location_t loc = EXPR_LOCATION (exp);
4556 if (call_expr_nargs (exp) < 2)
4558 error_at (loc, "too few arguments to function %<va_start%>");
4562 if (fold_builtin_next_arg (exp, true))
4565 nextarg = expand_builtin_next_arg ();
4566 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4568 if (targetm.expand_builtin_va_start)
4569 targetm.expand_builtin_va_start (valist, nextarg);
4571 std_expand_builtin_va_start (valist, nextarg);
4576 /* The "standard" implementation of va_arg: read the value from the
4577 current (padded) address and increment by the (padded) size. */
4580 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4583 tree addr, t, type_size, rounded_size, valist_tmp;
4584 unsigned HOST_WIDE_INT align, boundary;
4587 #ifdef ARGS_GROW_DOWNWARD
4588 /* All of the alignment and movement below is for args-grow-up machines.
4589 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4590 implement their own specialized gimplify_va_arg_expr routines. */
4594 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4596 type = build_pointer_type (type);
4598 align = PARM_BOUNDARY / BITS_PER_UNIT;
4599 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4601 /* When we align parameter on stack for caller, if the parameter
4602 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4603 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4604 here with caller. */
4605 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4606 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4608 boundary /= BITS_PER_UNIT;
4610 /* Hoist the valist value into a temporary for the moment. */
4611 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4613 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4614 requires greater alignment, we must perform dynamic alignment. */
4615 if (boundary > align
4616 && !integer_zerop (TYPE_SIZE (type)))
4618 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4619 fold_build2 (POINTER_PLUS_EXPR,
4621 valist_tmp, size_int (boundary - 1)));
4622 gimplify_and_add (t, pre_p);
4624 t = fold_convert (sizetype, valist_tmp);
4625 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4626 fold_convert (TREE_TYPE (valist),
4627 fold_build2 (BIT_AND_EXPR, sizetype, t,
4628 size_int (-boundary))));
4629 gimplify_and_add (t, pre_p);
4634 /* If the actual alignment is less than the alignment of the type,
4635 adjust the type accordingly so that we don't assume strict alignment
4636 when dereferencing the pointer. */
4637 boundary *= BITS_PER_UNIT;
4638 if (boundary < TYPE_ALIGN (type))
4640 type = build_variant_type_copy (type);
4641 TYPE_ALIGN (type) = boundary;
4644 /* Compute the rounded size of the type. */
4645 type_size = size_in_bytes (type);
4646 rounded_size = round_up (type_size, align);
4648 /* Reduce rounded_size so it's sharable with the postqueue. */
4649 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4653 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4655 /* Small args are padded downward. */
4656 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4657 rounded_size, size_int (align));
4658 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4659 size_binop (MINUS_EXPR, rounded_size, type_size));
4660 addr = fold_build2 (POINTER_PLUS_EXPR,
4661 TREE_TYPE (addr), addr, t);
4664 /* Compute new value for AP. */
4665 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4666 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4667 gimplify_and_add (t, pre_p);
4669 addr = fold_convert (build_pointer_type (type), addr);
4672 addr = build_va_arg_indirect_ref (addr);
4674 return build_va_arg_indirect_ref (addr);
4677 /* Build an indirect-ref expression over the given TREE, which represents a
4678 piece of a va_arg() expansion. */
4680 build_va_arg_indirect_ref (tree addr)
4682 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4684 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4690 /* Return a dummy expression of type TYPE in order to keep going after an
4694 dummy_object (tree type)
4696 tree t = build_int_cst (build_pointer_type (type), 0);
4697 return build1 (INDIRECT_REF, type, t);
4700 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4701 builtin function, but a very special sort of operator. */
4703 enum gimplify_status
4704 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4706 tree promoted_type, have_va_type;
4707 tree valist = TREE_OPERAND (*expr_p, 0);
4708 tree type = TREE_TYPE (*expr_p);
4710 location_t loc = EXPR_LOCATION (*expr_p);
4712 /* Verify that valist is of the proper type. */
4713 have_va_type = TREE_TYPE (valist);
4714 if (have_va_type == error_mark_node)
4716 have_va_type = targetm.canonical_va_list_type (have_va_type);
4718 if (have_va_type == NULL_TREE)
4720 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4724 /* Generate a diagnostic for requesting data of a type that cannot
4725 be passed through `...' due to type promotion at the call site. */
4726 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4729 static bool gave_help;
4732 /* Unfortunately, this is merely undefined, rather than a constraint
4733 violation, so we cannot make this an error. If this call is never
4734 executed, the program is still strictly conforming. */
4735 warned = warning_at (loc, 0,
4736 "%qT is promoted to %qT when passed through %<...%>",
4737 type, promoted_type);
4738 if (!gave_help && warned)
4741 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4742 promoted_type, type);
4745 /* We can, however, treat "undefined" any way we please.
4746 Call abort to encourage the user to fix the program. */
4748 inform (loc, "if this code is reached, the program will abort");
4749 /* Before the abort, allow the evaluation of the va_list
4750 expression to exit or longjmp. */
4751 gimplify_and_add (valist, pre_p);
4752 t = build_call_expr_loc (loc,
4753 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4754 gimplify_and_add (t, pre_p);
4756 /* This is dead code, but go ahead and finish so that the
4757 mode of the result comes out right. */
4758 *expr_p = dummy_object (type);
4763 /* Make it easier for the backends by protecting the valist argument
4764 from multiple evaluations. */
4765 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4767 /* For this case, the backends will be expecting a pointer to
4768 TREE_TYPE (abi), but it's possible we've
4769 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4771 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4773 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4774 valist = fold_convert_loc (loc, p1,
4775 build_fold_addr_expr_loc (loc, valist));
4778 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4781 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4783 if (!targetm.gimplify_va_arg_expr)
4784 /* FIXME: Once most targets are converted we should merely
4785 assert this is non-null. */
4788 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4793 /* Expand EXP, a call to __builtin_va_end. */
4796 expand_builtin_va_end (tree exp)
4798 tree valist = CALL_EXPR_ARG (exp, 0);
4800 /* Evaluate for side effects, if needed. I hate macros that don't
4802 if (TREE_SIDE_EFFECTS (valist))
4803 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4808 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4809 builtin rather than just as an assignment in stdarg.h because of the
4810 nastiness of array-type va_list types. */
4813 expand_builtin_va_copy (tree exp)
4816 location_t loc = EXPR_LOCATION (exp);
4818 dst = CALL_EXPR_ARG (exp, 0);
4819 src = CALL_EXPR_ARG (exp, 1);
4821 dst = stabilize_va_list_loc (loc, dst, 1);
4822 src = stabilize_va_list_loc (loc, src, 0);
4824 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4826 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4828 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4829 TREE_SIDE_EFFECTS (t) = 1;
4830 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4834 rtx dstb, srcb, size;
4836 /* Evaluate to pointers. */
4837 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4838 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4839 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4840 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4842 dstb = convert_memory_address (Pmode, dstb);
4843 srcb = convert_memory_address (Pmode, srcb);
4845 /* "Dereference" to BLKmode memories. */
4846 dstb = gen_rtx_MEM (BLKmode, dstb);
4847 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4848 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4849 srcb = gen_rtx_MEM (BLKmode, srcb);
4850 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4851 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4854 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4860 /* Expand a call to one of the builtin functions __builtin_frame_address or
4861 __builtin_return_address. */
4864 expand_builtin_frame_address (tree fndecl, tree exp)
4866 /* The argument must be a nonnegative integer constant.
4867 It counts the number of frames to scan up the stack.
4868 The value is the return address saved in that frame. */
4869 if (call_expr_nargs (exp) == 0)
4870 /* Warning about missing arg was already issued. */
4872 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4874 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4875 error ("invalid argument to %<__builtin_frame_address%>");
4877 error ("invalid argument to %<__builtin_return_address%>");
4883 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4884 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4886 /* Some ports cannot access arbitrary stack frames. */
4889 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4890 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4892 warning (0, "unsupported argument to %<__builtin_return_address%>");
4896 /* For __builtin_frame_address, return what we've got. */
4897 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4901 && ! CONSTANT_P (tem))
4902 tem = copy_to_mode_reg (Pmode, tem);
4907 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4908 we failed and the caller should emit a normal call, otherwise try to get
4909 the result in TARGET, if convenient. */
4912 expand_builtin_alloca (tree exp, rtx target)
4917 /* Emit normal call if marked not-inlineable. */
4918 if (CALL_CANNOT_INLINE_P (exp))
4921 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4924 /* Compute the argument. */
4925 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4927 /* Allocate the desired space. */
4928 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4929 result = convert_memory_address (ptr_mode, result);
4934 /* Expand a call to a bswap builtin with argument ARG0. MODE
4935 is the mode to expand with. */
4938 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4940 enum machine_mode mode;
4944 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4947 arg = CALL_EXPR_ARG (exp, 0);
4948 mode = TYPE_MODE (TREE_TYPE (arg));
4949 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4951 target = expand_unop (mode, bswap_optab, op0, target, 1);
4953 gcc_assert (target);
4955 return convert_to_mode (mode, target, 0);
4958 /* Expand a call to a unary builtin in EXP.
4959 Return NULL_RTX if a normal call should be emitted rather than expanding the
4960 function in-line. If convenient, the result should be placed in TARGET.
4961 SUBTARGET may be used as the target for computing one of EXP's operands. */
4964 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4965 rtx subtarget, optab op_optab)
4969 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4972 /* Compute the argument. */
4973 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4974 VOIDmode, EXPAND_NORMAL);
4975 /* Compute op, into TARGET if possible.
4976 Set TARGET to wherever the result comes back. */
4977 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4978 op_optab, op0, target, 1);
4979 gcc_assert (target);
4981 return convert_to_mode (target_mode, target, 0);
4984 /* Expand a call to __builtin_expect. We just return our argument
4985 as the builtin_expect semantic should've been already executed by
4986 tree branch prediction pass. */
4989 expand_builtin_expect (tree exp, rtx target)
4993 if (call_expr_nargs (exp) < 2)
4995 arg = CALL_EXPR_ARG (exp, 0);
4997 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4998 /* When guessing was done, the hints should be already stripped away. */
4999 gcc_assert (!flag_guess_branch_prob
5000 || optimize == 0 || errorcount || sorrycount);
5005 expand_builtin_trap (void)
5009 emit_insn (gen_trap ());
5012 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5016 /* Expand a call to __builtin_unreachable. We do nothing except emit
5017 a barrier saying that control flow will not pass here.
5019 It is the responsibility of the program being compiled to ensure
5020 that control flow does never reach __builtin_unreachable. */
5022 expand_builtin_unreachable (void)
5027 /* Expand EXP, a call to fabs, fabsf or fabsl.
5028 Return NULL_RTX if a normal call should be emitted rather than expanding
5029 the function inline. If convenient, the result should be placed
5030 in TARGET. SUBTARGET may be used as the target for computing
5034 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5036 enum machine_mode mode;
5040 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5043 arg = CALL_EXPR_ARG (exp, 0);
5044 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5045 mode = TYPE_MODE (TREE_TYPE (arg));
5046 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5047 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5050 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5051 Return NULL is a normal call should be emitted rather than expanding the
5052 function inline. If convenient, the result should be placed in TARGET.
5053 SUBTARGET may be used as the target for computing the operand. */
5056 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5061 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5064 arg = CALL_EXPR_ARG (exp, 0);
5065 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5067 arg = CALL_EXPR_ARG (exp, 1);
5068 op1 = expand_normal (arg);
5070 return expand_copysign (op0, op1, target);
5073 /* Create a new constant string literal and return a char* pointer to it.
5074 The STRING_CST value is the LEN characters at STR. */
5076 build_string_literal (int len, const char *str)
5078 tree t, elem, index, type;
5080 t = build_string (len, str);
5081 elem = build_type_variant (char_type_node, 1, 0);
5082 index = build_index_type (size_int (len - 1));
5083 type = build_array_type (elem, index);
5084 TREE_TYPE (t) = type;
5085 TREE_CONSTANT (t) = 1;
5086 TREE_READONLY (t) = 1;
5087 TREE_STATIC (t) = 1;
5089 type = build_pointer_type (elem);
5090 t = build1 (ADDR_EXPR, type,
5091 build4 (ARRAY_REF, elem,
5092 t, integer_zero_node, NULL_TREE, NULL_TREE));
5096 /* Expand a call to either the entry or exit function profiler. */
5099 expand_builtin_profile_func (bool exitp)
5101 rtx this_rtx, which;
5103 this_rtx = DECL_RTL (current_function_decl);
5104 gcc_assert (MEM_P (this_rtx));
5105 this_rtx = XEXP (this_rtx, 0);
5108 which = profile_function_exit_libfunc;
5110 which = profile_function_entry_libfunc;
5112 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5113 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5120 /* Expand a call to __builtin___clear_cache. */
5123 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5125 #ifndef HAVE_clear_cache
5126 #ifdef CLEAR_INSN_CACHE
5127 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5128 does something. Just do the default expansion to a call to
5132 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5133 does nothing. There is no need to call it. Do nothing. */
5135 #endif /* CLEAR_INSN_CACHE */
5137 /* We have a "clear_cache" insn, and it will handle everything. */
5139 rtx begin_rtx, end_rtx;
5140 enum insn_code icode;
5142 /* We must not expand to a library call. If we did, any
5143 fallback library function in libgcc that might contain a call to
5144 __builtin___clear_cache() would recurse infinitely. */
5145 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5147 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5151 if (HAVE_clear_cache)
5153 icode = CODE_FOR_clear_cache;
5155 begin = CALL_EXPR_ARG (exp, 0);
5156 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5157 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5158 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5159 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5161 end = CALL_EXPR_ARG (exp, 1);
5162 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5163 end_rtx = convert_memory_address (Pmode, end_rtx);
5164 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5165 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5167 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5170 #endif /* HAVE_clear_cache */
5173 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5176 round_trampoline_addr (rtx tramp)
5178 rtx temp, addend, mask;
5180 /* If we don't need too much alignment, we'll have been guaranteed
5181 proper alignment by get_trampoline_type. */
5182 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5185 /* Round address up to desired boundary. */
5186 temp = gen_reg_rtx (Pmode);
5187 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5188 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5190 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5191 temp, 0, OPTAB_LIB_WIDEN);
5192 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5193 temp, 0, OPTAB_LIB_WIDEN);
5199 expand_builtin_init_trampoline (tree exp)
5201 tree t_tramp, t_func, t_chain;
5202 rtx m_tramp, r_tramp, r_chain, tmp;
5204 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5205 POINTER_TYPE, VOID_TYPE))
5208 t_tramp = CALL_EXPR_ARG (exp, 0);
5209 t_func = CALL_EXPR_ARG (exp, 1);
5210 t_chain = CALL_EXPR_ARG (exp, 2);
5212 r_tramp = expand_normal (t_tramp);
5213 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5214 MEM_NOTRAP_P (m_tramp) = 1;
5216 /* The TRAMP argument should be the address of a field within the
5217 local function's FRAME decl. Let's see if we can fill in the
5218 to fill in the MEM_ATTRs for this memory. */
5219 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5220 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
5223 tmp = round_trampoline_addr (r_tramp);
5226 m_tramp = change_address (m_tramp, BLKmode, tmp);
5227 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5228 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5231 /* The FUNC argument should be the address of the nested function.
5232 Extract the actual function decl to pass to the hook. */
5233 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5234 t_func = TREE_OPERAND (t_func, 0);
5235 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5237 r_chain = expand_normal (t_chain);
5239 /* Generate insns to initialize the trampoline. */
5240 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5242 trampolines_created = 1;
5247 expand_builtin_adjust_trampoline (tree exp)
5251 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5254 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5255 tramp = round_trampoline_addr (tramp);
5256 if (targetm.calls.trampoline_adjust_address)
5257 tramp = targetm.calls.trampoline_adjust_address (tramp);
5262 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5263 function. The function first checks whether the back end provides
5264 an insn to implement signbit for the respective mode. If not, it
5265 checks whether the floating point format of the value is such that
5266 the sign bit can be extracted. If that is not the case, the
5267 function returns NULL_RTX to indicate that a normal call should be
5268 emitted rather than expanding the function in-line. EXP is the
5269 expression that is a call to the builtin function; if convenient,
5270 the result should be placed in TARGET. */
5272 expand_builtin_signbit (tree exp, rtx target)
5274 const struct real_format *fmt;
5275 enum machine_mode fmode, imode, rmode;
5278 enum insn_code icode;
5280 location_t loc = EXPR_LOCATION (exp);
5282 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5285 arg = CALL_EXPR_ARG (exp, 0);
5286 fmode = TYPE_MODE (TREE_TYPE (arg));
5287 rmode = TYPE_MODE (TREE_TYPE (exp));
5288 fmt = REAL_MODE_FORMAT (fmode);
5290 arg = builtin_save_expr (arg);
5292 /* Expand the argument yielding a RTX expression. */
5293 temp = expand_normal (arg);
5295 /* Check if the back end provides an insn that handles signbit for the
5297 icode = signbit_optab->handlers [(int) fmode].insn_code;
5298 if (icode != CODE_FOR_nothing)
5300 rtx last = get_last_insn ();
5301 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5302 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5304 delete_insns_since (last);
5307 /* For floating point formats without a sign bit, implement signbit
5309 bitpos = fmt->signbit_ro;
5312 /* But we can't do this if the format supports signed zero. */
5313 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5316 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5317 build_real (TREE_TYPE (arg), dconst0));
5318 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5321 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5323 imode = int_mode_for_mode (fmode);
5324 if (imode == BLKmode)
5326 temp = gen_lowpart (imode, temp);
5331 /* Handle targets with different FP word orders. */
5332 if (FLOAT_WORDS_BIG_ENDIAN)
5333 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5335 word = bitpos / BITS_PER_WORD;
5336 temp = operand_subword_force (temp, word, fmode);
5337 bitpos = bitpos % BITS_PER_WORD;
5340 /* Force the intermediate word_mode (or narrower) result into a
5341 register. This avoids attempting to create paradoxical SUBREGs
5342 of floating point modes below. */
5343 temp = force_reg (imode, temp);
5345 /* If the bitpos is within the "result mode" lowpart, the operation
5346 can be implement with a single bitwise AND. Otherwise, we need
5347 a right shift and an AND. */
5349 if (bitpos < GET_MODE_BITSIZE (rmode))
5351 double_int mask = double_int_setbit (double_int_zero, bitpos);
5353 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5354 temp = gen_lowpart (rmode, temp);
5355 temp = expand_binop (rmode, and_optab, temp,
5356 immed_double_int_const (mask, rmode),
5357 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5361 /* Perform a logical right shift to place the signbit in the least
5362 significant bit, then truncate the result to the desired mode
5363 and mask just this bit. */
5364 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5365 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5366 temp = gen_lowpart (rmode, temp);
5367 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5368 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5374 /* Expand fork or exec calls. TARGET is the desired target of the
5375 call. EXP is the call. FN is the
5376 identificator of the actual function. IGNORE is nonzero if the
5377 value is to be ignored. */
5380 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5385 /* If we are not profiling, just call the function. */
5386 if (!profile_arc_flag)
5389 /* Otherwise call the wrapper. This should be equivalent for the rest of
5390 compiler, so the code does not diverge, and the wrapper may run the
5391 code necessary for keeping the profiling sane. */
5393 switch (DECL_FUNCTION_CODE (fn))
5396 id = get_identifier ("__gcov_fork");
5399 case BUILT_IN_EXECL:
5400 id = get_identifier ("__gcov_execl");
5403 case BUILT_IN_EXECV:
5404 id = get_identifier ("__gcov_execv");
5407 case BUILT_IN_EXECLP:
5408 id = get_identifier ("__gcov_execlp");
5411 case BUILT_IN_EXECLE:
5412 id = get_identifier ("__gcov_execle");
5415 case BUILT_IN_EXECVP:
5416 id = get_identifier ("__gcov_execvp");
5419 case BUILT_IN_EXECVE:
5420 id = get_identifier ("__gcov_execve");
5427 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5428 FUNCTION_DECL, id, TREE_TYPE (fn));
5429 DECL_EXTERNAL (decl) = 1;
5430 TREE_PUBLIC (decl) = 1;
5431 DECL_ARTIFICIAL (decl) = 1;
5432 TREE_NOTHROW (decl) = 1;
5433 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5434 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5435 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5436 return expand_call (call, target, ignore);
5441 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5442 the pointer in these functions is void*, the tree optimizers may remove
5443 casts. The mode computed in expand_builtin isn't reliable either, due
5444 to __sync_bool_compare_and_swap.
5446 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5447 group of builtins. This gives us log2 of the mode size. */
5449 static inline enum machine_mode
5450 get_builtin_sync_mode (int fcode_diff)
5452 /* The size is not negotiable, so ask not to get BLKmode in return
5453 if the target indicates that a smaller size would be better. */
5454 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5457 /* Expand the memory expression LOC and return the appropriate memory operand
5458 for the builtin_sync operations. */
5461 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5465 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5466 addr = convert_memory_address (Pmode, addr);
5468 /* Note that we explicitly do not want any alias information for this
5469 memory, so that we kill all other live memories. Otherwise we don't
5470 satisfy the full barrier semantics of the intrinsic. */
5471 mem = validize_mem (gen_rtx_MEM (mode, addr));
5473 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5474 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5475 MEM_VOLATILE_P (mem) = 1;
5480 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5481 EXP is the CALL_EXPR. CODE is the rtx code
5482 that corresponds to the arithmetic or logical operation from the name;
5483 an exception here is that NOT actually means NAND. TARGET is an optional
5484 place for us to store the results; AFTER is true if this is the
5485 fetch_and_xxx form. IGNORE is true if we don't actually care about
5486 the result of the operation at all. */
5489 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5490 enum rtx_code code, bool after,
5491 rtx target, bool ignore)
5494 enum machine_mode old_mode;
5495 location_t loc = EXPR_LOCATION (exp);
5497 if (code == NOT && warn_sync_nand)
5499 tree fndecl = get_callee_fndecl (exp);
5500 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5502 static bool warned_f_a_n, warned_n_a_f;
5506 case BUILT_IN_FETCH_AND_NAND_1:
5507 case BUILT_IN_FETCH_AND_NAND_2:
5508 case BUILT_IN_FETCH_AND_NAND_4:
5509 case BUILT_IN_FETCH_AND_NAND_8:
5510 case BUILT_IN_FETCH_AND_NAND_16:
5515 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5516 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5517 warned_f_a_n = true;
5520 case BUILT_IN_NAND_AND_FETCH_1:
5521 case BUILT_IN_NAND_AND_FETCH_2:
5522 case BUILT_IN_NAND_AND_FETCH_4:
5523 case BUILT_IN_NAND_AND_FETCH_8:
5524 case BUILT_IN_NAND_AND_FETCH_16:
5529 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5530 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5531 warned_n_a_f = true;
5539 /* Expand the operands. */
5540 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5542 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5543 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5544 of CONST_INTs, where we know the old_mode only from the call argument. */
5545 old_mode = GET_MODE (val);
5546 if (old_mode == VOIDmode)
5547 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5548 val = convert_modes (mode, old_mode, val, 1);
5551 return expand_sync_operation (mem, val, code);
5553 return expand_sync_fetch_operation (mem, val, code, after, target);
5556 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5557 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5558 true if this is the boolean form. TARGET is a place for us to store the
5559 results; this is NOT optional if IS_BOOL is true. */
5562 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5563 bool is_bool, rtx target)
5565 rtx old_val, new_val, mem;
5566 enum machine_mode old_mode;
5568 /* Expand the operands. */
5569 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5572 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5573 mode, EXPAND_NORMAL);
5574 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5575 of CONST_INTs, where we know the old_mode only from the call argument. */
5576 old_mode = GET_MODE (old_val);
5577 if (old_mode == VOIDmode)
5578 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5579 old_val = convert_modes (mode, old_mode, old_val, 1);
5581 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5582 mode, EXPAND_NORMAL);
5583 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5584 of CONST_INTs, where we know the old_mode only from the call argument. */
5585 old_mode = GET_MODE (new_val);
5586 if (old_mode == VOIDmode)
5587 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5588 new_val = convert_modes (mode, old_mode, new_val, 1);
5591 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5593 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5596 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5597 general form is actually an atomic exchange, and some targets only
5598 support a reduced form with the second argument being a constant 1.
5599 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5603 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5607 enum machine_mode old_mode;
5609 /* Expand the operands. */
5610 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5611 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5612 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5613 of CONST_INTs, where we know the old_mode only from the call argument. */
5614 old_mode = GET_MODE (val);
5615 if (old_mode == VOIDmode)
5616 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5617 val = convert_modes (mode, old_mode, val, 1);
5619 return expand_sync_lock_test_and_set (mem, val, target);
5622 /* Expand the __sync_synchronize intrinsic. */
5625 expand_builtin_synchronize (void)
5628 VEC (tree, gc) *v_clobbers;
5630 #ifdef HAVE_memory_barrier
5631 if (HAVE_memory_barrier)
5633 emit_insn (gen_memory_barrier ());
5638 if (synchronize_libfunc != NULL_RTX)
5640 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5644 /* If no explicit memory barrier instruction is available, create an
5645 empty asm stmt with a memory clobber. */
5646 v_clobbers = VEC_alloc (tree, gc, 1);
5647 VEC_quick_push (tree, v_clobbers,
5648 tree_cons (NULL, build_string (6, "memory"), NULL));
5649 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5650 gimple_asm_set_volatile (x, true);
5651 expand_asm_stmt (x);
5654 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5657 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5659 enum insn_code icode;
5661 rtx val = const0_rtx;
5663 /* Expand the operands. */
5664 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5666 /* If there is an explicit operation in the md file, use it. */
5667 icode = sync_lock_release[mode];
5668 if (icode != CODE_FOR_nothing)
5670 if (!insn_data[icode].operand[1].predicate (val, mode))
5671 val = force_reg (mode, val);
5673 insn = GEN_FCN (icode) (mem, val);
5681 /* Otherwise we can implement this operation by emitting a barrier
5682 followed by a store of zero. */
5683 expand_builtin_synchronize ();
5684 emit_move_insn (mem, val);
5687 /* Expand an expression EXP that calls a built-in function,
5688 with result going to TARGET if that's convenient
5689 (and in mode MODE if that's convenient).
5690 SUBTARGET may be used as the target for computing one of EXP's operands.
5691 IGNORE is nonzero if the value is to be ignored. */
5694 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5697 tree fndecl = get_callee_fndecl (exp);
5698 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5699 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5701 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5702 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5704 /* When not optimizing, generate calls to library functions for a certain
5707 && !called_as_built_in (fndecl)
5708 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5709 && fcode != BUILT_IN_ALLOCA
5710 && fcode != BUILT_IN_FREE)
5711 return expand_call (exp, target, ignore);
5713 /* The built-in function expanders test for target == const0_rtx
5714 to determine whether the function's result will be ignored. */
5716 target = const0_rtx;
5718 /* If the result of a pure or const built-in function is ignored, and
5719 none of its arguments are volatile, we can avoid expanding the
5720 built-in call and just evaluate the arguments for side-effects. */
5721 if (target == const0_rtx
5722 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5724 bool volatilep = false;
5726 call_expr_arg_iterator iter;
5728 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5729 if (TREE_THIS_VOLATILE (arg))
5737 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5738 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5745 CASE_FLT_FN (BUILT_IN_FABS):
5746 target = expand_builtin_fabs (exp, target, subtarget);
5751 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5752 target = expand_builtin_copysign (exp, target, subtarget);
5757 /* Just do a normal library call if we were unable to fold
5759 CASE_FLT_FN (BUILT_IN_CABS):
5762 CASE_FLT_FN (BUILT_IN_EXP):
5763 CASE_FLT_FN (BUILT_IN_EXP10):
5764 CASE_FLT_FN (BUILT_IN_POW10):
5765 CASE_FLT_FN (BUILT_IN_EXP2):
5766 CASE_FLT_FN (BUILT_IN_EXPM1):
5767 CASE_FLT_FN (BUILT_IN_LOGB):
5768 CASE_FLT_FN (BUILT_IN_LOG):
5769 CASE_FLT_FN (BUILT_IN_LOG10):
5770 CASE_FLT_FN (BUILT_IN_LOG2):
5771 CASE_FLT_FN (BUILT_IN_LOG1P):
5772 CASE_FLT_FN (BUILT_IN_TAN):
5773 CASE_FLT_FN (BUILT_IN_ASIN):
5774 CASE_FLT_FN (BUILT_IN_ACOS):
5775 CASE_FLT_FN (BUILT_IN_ATAN):
5776 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5777 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5778 because of possible accuracy problems. */
5779 if (! flag_unsafe_math_optimizations)
5781 CASE_FLT_FN (BUILT_IN_SQRT):
5782 CASE_FLT_FN (BUILT_IN_FLOOR):
5783 CASE_FLT_FN (BUILT_IN_CEIL):
5784 CASE_FLT_FN (BUILT_IN_TRUNC):
5785 CASE_FLT_FN (BUILT_IN_ROUND):
5786 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5787 CASE_FLT_FN (BUILT_IN_RINT):
5788 target = expand_builtin_mathfn (exp, target, subtarget);
5793 CASE_FLT_FN (BUILT_IN_ILOGB):
5794 if (! flag_unsafe_math_optimizations)
5796 CASE_FLT_FN (BUILT_IN_ISINF):
5797 CASE_FLT_FN (BUILT_IN_FINITE):
5798 case BUILT_IN_ISFINITE:
5799 case BUILT_IN_ISNORMAL:
5800 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5805 CASE_FLT_FN (BUILT_IN_LCEIL):
5806 CASE_FLT_FN (BUILT_IN_LLCEIL):
5807 CASE_FLT_FN (BUILT_IN_LFLOOR):
5808 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5809 target = expand_builtin_int_roundingfn (exp, target);
5814 CASE_FLT_FN (BUILT_IN_LRINT):
5815 CASE_FLT_FN (BUILT_IN_LLRINT):
5816 CASE_FLT_FN (BUILT_IN_LROUND):
5817 CASE_FLT_FN (BUILT_IN_LLROUND):
5818 target = expand_builtin_int_roundingfn_2 (exp, target);
5823 CASE_FLT_FN (BUILT_IN_POW):
5824 target = expand_builtin_pow (exp, target, subtarget);
5829 CASE_FLT_FN (BUILT_IN_POWI):
5830 target = expand_builtin_powi (exp, target, subtarget);
5835 CASE_FLT_FN (BUILT_IN_ATAN2):
5836 CASE_FLT_FN (BUILT_IN_LDEXP):
5837 CASE_FLT_FN (BUILT_IN_SCALB):
5838 CASE_FLT_FN (BUILT_IN_SCALBN):
5839 CASE_FLT_FN (BUILT_IN_SCALBLN):
5840 if (! flag_unsafe_math_optimizations)
5843 CASE_FLT_FN (BUILT_IN_FMOD):
5844 CASE_FLT_FN (BUILT_IN_REMAINDER):
5845 CASE_FLT_FN (BUILT_IN_DREM):
5846 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5851 CASE_FLT_FN (BUILT_IN_CEXPI):
5852 target = expand_builtin_cexpi (exp, target, subtarget);
5853 gcc_assert (target);
5856 CASE_FLT_FN (BUILT_IN_SIN):
5857 CASE_FLT_FN (BUILT_IN_COS):
5858 if (! flag_unsafe_math_optimizations)
5860 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5865 CASE_FLT_FN (BUILT_IN_SINCOS):
5866 if (! flag_unsafe_math_optimizations)
5868 target = expand_builtin_sincos (exp);
5873 case BUILT_IN_APPLY_ARGS:
5874 return expand_builtin_apply_args ();
5876 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5877 FUNCTION with a copy of the parameters described by
5878 ARGUMENTS, and ARGSIZE. It returns a block of memory
5879 allocated on the stack into which is stored all the registers
5880 that might possibly be used for returning the result of a
5881 function. ARGUMENTS is the value returned by
5882 __builtin_apply_args. ARGSIZE is the number of bytes of
5883 arguments that must be copied. ??? How should this value be
5884 computed? We'll also need a safe worst case value for varargs
5886 case BUILT_IN_APPLY:
5887 if (!validate_arglist (exp, POINTER_TYPE,
5888 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5889 && !validate_arglist (exp, REFERENCE_TYPE,
5890 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5896 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5897 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5898 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5900 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5903 /* __builtin_return (RESULT) causes the function to return the
5904 value described by RESULT. RESULT is address of the block of
5905 memory returned by __builtin_apply. */
5906 case BUILT_IN_RETURN:
5907 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5908 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5911 case BUILT_IN_SAVEREGS:
5912 return expand_builtin_saveregs ();
5914 case BUILT_IN_ARGS_INFO:
5915 return expand_builtin_args_info (exp);
5917 case BUILT_IN_VA_ARG_PACK:
5918 /* All valid uses of __builtin_va_arg_pack () are removed during
5920 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5923 case BUILT_IN_VA_ARG_PACK_LEN:
5924 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5926 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5929 /* Return the address of the first anonymous stack arg. */
5930 case BUILT_IN_NEXT_ARG:
5931 if (fold_builtin_next_arg (exp, false))
5933 return expand_builtin_next_arg ();
5935 case BUILT_IN_CLEAR_CACHE:
5936 target = expand_builtin___clear_cache (exp);
5941 case BUILT_IN_CLASSIFY_TYPE:
5942 return expand_builtin_classify_type (exp);
5944 case BUILT_IN_CONSTANT_P:
5947 case BUILT_IN_FRAME_ADDRESS:
5948 case BUILT_IN_RETURN_ADDRESS:
5949 return expand_builtin_frame_address (fndecl, exp);
5951 /* Returns the address of the area where the structure is returned.
5953 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5954 if (call_expr_nargs (exp) != 0
5955 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5956 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5959 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5961 case BUILT_IN_ALLOCA:
5962 target = expand_builtin_alloca (exp, target);
5967 case BUILT_IN_STACK_SAVE:
5968 return expand_stack_save ();
5970 case BUILT_IN_STACK_RESTORE:
5971 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5974 case BUILT_IN_BSWAP32:
5975 case BUILT_IN_BSWAP64:
5976 target = expand_builtin_bswap (exp, target, subtarget);
5982 CASE_INT_FN (BUILT_IN_FFS):
5983 case BUILT_IN_FFSIMAX:
5984 target = expand_builtin_unop (target_mode, exp, target,
5985 subtarget, ffs_optab);
5990 CASE_INT_FN (BUILT_IN_CLZ):
5991 case BUILT_IN_CLZIMAX:
5992 target = expand_builtin_unop (target_mode, exp, target,
5993 subtarget, clz_optab);
5998 CASE_INT_FN (BUILT_IN_CTZ):
5999 case BUILT_IN_CTZIMAX:
6000 target = expand_builtin_unop (target_mode, exp, target,
6001 subtarget, ctz_optab);
6006 CASE_INT_FN (BUILT_IN_POPCOUNT):
6007 case BUILT_IN_POPCOUNTIMAX:
6008 target = expand_builtin_unop (target_mode, exp, target,
6009 subtarget, popcount_optab);
6014 CASE_INT_FN (BUILT_IN_PARITY):
6015 case BUILT_IN_PARITYIMAX:
6016 target = expand_builtin_unop (target_mode, exp, target,
6017 subtarget, parity_optab);
6022 case BUILT_IN_STRLEN:
6023 target = expand_builtin_strlen (exp, target, target_mode);
6028 case BUILT_IN_STRCPY:
6029 target = expand_builtin_strcpy (exp, target);
6034 case BUILT_IN_STRNCPY:
6035 target = expand_builtin_strncpy (exp, target);
6040 case BUILT_IN_STPCPY:
6041 target = expand_builtin_stpcpy (exp, target, mode);
6046 case BUILT_IN_MEMCPY:
6047 target = expand_builtin_memcpy (exp, target);
6052 case BUILT_IN_MEMPCPY:
6053 target = expand_builtin_mempcpy (exp, target, mode);
6058 case BUILT_IN_MEMSET:
6059 target = expand_builtin_memset (exp, target, mode);
6064 case BUILT_IN_BZERO:
6065 target = expand_builtin_bzero (exp);
6070 case BUILT_IN_STRCMP:
6071 target = expand_builtin_strcmp (exp, target);
6076 case BUILT_IN_STRNCMP:
6077 target = expand_builtin_strncmp (exp, target, mode);
6083 case BUILT_IN_MEMCMP:
6084 target = expand_builtin_memcmp (exp, target, mode);
6089 case BUILT_IN_SETJMP:
6090 /* This should have been lowered to the builtins below. */
6093 case BUILT_IN_SETJMP_SETUP:
6094 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6095 and the receiver label. */
6096 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6098 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6099 VOIDmode, EXPAND_NORMAL);
6100 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6101 rtx label_r = label_rtx (label);
6103 /* This is copied from the handling of non-local gotos. */
6104 expand_builtin_setjmp_setup (buf_addr, label_r);
6105 nonlocal_goto_handler_labels
6106 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6107 nonlocal_goto_handler_labels);
6108 /* ??? Do not let expand_label treat us as such since we would
6109 not want to be both on the list of non-local labels and on
6110 the list of forced labels. */
6111 FORCED_LABEL (label) = 0;
6116 case BUILT_IN_SETJMP_DISPATCHER:
6117 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6118 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6120 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6121 rtx label_r = label_rtx (label);
6123 /* Remove the dispatcher label from the list of non-local labels
6124 since the receiver labels have been added to it above. */
6125 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6130 case BUILT_IN_SETJMP_RECEIVER:
6131 /* __builtin_setjmp_receiver is passed the receiver label. */
6132 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6134 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6135 rtx label_r = label_rtx (label);
6137 expand_builtin_setjmp_receiver (label_r);
6142 /* __builtin_longjmp is passed a pointer to an array of five words.
6143 It's similar to the C library longjmp function but works with
6144 __builtin_setjmp above. */
6145 case BUILT_IN_LONGJMP:
6146 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6148 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6149 VOIDmode, EXPAND_NORMAL);
6150 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6152 if (value != const1_rtx)
6154 error ("%<__builtin_longjmp%> second argument must be 1");
6158 expand_builtin_longjmp (buf_addr, value);
6163 case BUILT_IN_NONLOCAL_GOTO:
6164 target = expand_builtin_nonlocal_goto (exp);
6169 /* This updates the setjmp buffer that is its argument with the value
6170 of the current stack pointer. */
6171 case BUILT_IN_UPDATE_SETJMP_BUF:
6172 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6175 = expand_normal (CALL_EXPR_ARG (exp, 0));
6177 expand_builtin_update_setjmp_buf (buf_addr);
6183 expand_builtin_trap ();
6186 case BUILT_IN_UNREACHABLE:
6187 expand_builtin_unreachable ();
6190 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6191 case BUILT_IN_SIGNBITD32:
6192 case BUILT_IN_SIGNBITD64:
6193 case BUILT_IN_SIGNBITD128:
6194 target = expand_builtin_signbit (exp, target);
6199 /* Various hooks for the DWARF 2 __throw routine. */
6200 case BUILT_IN_UNWIND_INIT:
6201 expand_builtin_unwind_init ();
6203 case BUILT_IN_DWARF_CFA:
6204 return virtual_cfa_rtx;
6205 #ifdef DWARF2_UNWIND_INFO
6206 case BUILT_IN_DWARF_SP_COLUMN:
6207 return expand_builtin_dwarf_sp_column ();
6208 case BUILT_IN_INIT_DWARF_REG_SIZES:
6209 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6212 case BUILT_IN_FROB_RETURN_ADDR:
6213 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6214 case BUILT_IN_EXTRACT_RETURN_ADDR:
6215 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6216 case BUILT_IN_EH_RETURN:
6217 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6218 CALL_EXPR_ARG (exp, 1));
6220 #ifdef EH_RETURN_DATA_REGNO
6221 case BUILT_IN_EH_RETURN_DATA_REGNO:
6222 return expand_builtin_eh_return_data_regno (exp);
6224 case BUILT_IN_EXTEND_POINTER:
6225 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6226 case BUILT_IN_EH_POINTER:
6227 return expand_builtin_eh_pointer (exp);
6228 case BUILT_IN_EH_FILTER:
6229 return expand_builtin_eh_filter (exp);
6230 case BUILT_IN_EH_COPY_VALUES:
6231 return expand_builtin_eh_copy_values (exp);
6233 case BUILT_IN_VA_START:
6234 return expand_builtin_va_start (exp);
6235 case BUILT_IN_VA_END:
6236 return expand_builtin_va_end (exp);
6237 case BUILT_IN_VA_COPY:
6238 return expand_builtin_va_copy (exp);
6239 case BUILT_IN_EXPECT:
6240 return expand_builtin_expect (exp, target);
6241 case BUILT_IN_PREFETCH:
6242 expand_builtin_prefetch (exp);
6245 case BUILT_IN_PROFILE_FUNC_ENTER:
6246 return expand_builtin_profile_func (false);
6247 case BUILT_IN_PROFILE_FUNC_EXIT:
6248 return expand_builtin_profile_func (true);
6250 case BUILT_IN_INIT_TRAMPOLINE:
6251 return expand_builtin_init_trampoline (exp);
6252 case BUILT_IN_ADJUST_TRAMPOLINE:
6253 return expand_builtin_adjust_trampoline (exp);
6256 case BUILT_IN_EXECL:
6257 case BUILT_IN_EXECV:
6258 case BUILT_IN_EXECLP:
6259 case BUILT_IN_EXECLE:
6260 case BUILT_IN_EXECVP:
6261 case BUILT_IN_EXECVE:
6262 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6267 case BUILT_IN_FETCH_AND_ADD_1:
6268 case BUILT_IN_FETCH_AND_ADD_2:
6269 case BUILT_IN_FETCH_AND_ADD_4:
6270 case BUILT_IN_FETCH_AND_ADD_8:
6271 case BUILT_IN_FETCH_AND_ADD_16:
6272 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6273 target = expand_builtin_sync_operation (mode, exp, PLUS,
6274 false, target, ignore);
6279 case BUILT_IN_FETCH_AND_SUB_1:
6280 case BUILT_IN_FETCH_AND_SUB_2:
6281 case BUILT_IN_FETCH_AND_SUB_4:
6282 case BUILT_IN_FETCH_AND_SUB_8:
6283 case BUILT_IN_FETCH_AND_SUB_16:
6284 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6285 target = expand_builtin_sync_operation (mode, exp, MINUS,
6286 false, target, ignore);
6291 case BUILT_IN_FETCH_AND_OR_1:
6292 case BUILT_IN_FETCH_AND_OR_2:
6293 case BUILT_IN_FETCH_AND_OR_4:
6294 case BUILT_IN_FETCH_AND_OR_8:
6295 case BUILT_IN_FETCH_AND_OR_16:
6296 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6297 target = expand_builtin_sync_operation (mode, exp, IOR,
6298 false, target, ignore);
6303 case BUILT_IN_FETCH_AND_AND_1:
6304 case BUILT_IN_FETCH_AND_AND_2:
6305 case BUILT_IN_FETCH_AND_AND_4:
6306 case BUILT_IN_FETCH_AND_AND_8:
6307 case BUILT_IN_FETCH_AND_AND_16:
6308 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6309 target = expand_builtin_sync_operation (mode, exp, AND,
6310 false, target, ignore);
6315 case BUILT_IN_FETCH_AND_XOR_1:
6316 case BUILT_IN_FETCH_AND_XOR_2:
6317 case BUILT_IN_FETCH_AND_XOR_4:
6318 case BUILT_IN_FETCH_AND_XOR_8:
6319 case BUILT_IN_FETCH_AND_XOR_16:
6320 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6321 target = expand_builtin_sync_operation (mode, exp, XOR,
6322 false, target, ignore);
6327 case BUILT_IN_FETCH_AND_NAND_1:
6328 case BUILT_IN_FETCH_AND_NAND_2:
6329 case BUILT_IN_FETCH_AND_NAND_4:
6330 case BUILT_IN_FETCH_AND_NAND_8:
6331 case BUILT_IN_FETCH_AND_NAND_16:
6332 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6333 target = expand_builtin_sync_operation (mode, exp, NOT,
6334 false, target, ignore);
6339 case BUILT_IN_ADD_AND_FETCH_1:
6340 case BUILT_IN_ADD_AND_FETCH_2:
6341 case BUILT_IN_ADD_AND_FETCH_4:
6342 case BUILT_IN_ADD_AND_FETCH_8:
6343 case BUILT_IN_ADD_AND_FETCH_16:
6344 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6345 target = expand_builtin_sync_operation (mode, exp, PLUS,
6346 true, target, ignore);
6351 case BUILT_IN_SUB_AND_FETCH_1:
6352 case BUILT_IN_SUB_AND_FETCH_2:
6353 case BUILT_IN_SUB_AND_FETCH_4:
6354 case BUILT_IN_SUB_AND_FETCH_8:
6355 case BUILT_IN_SUB_AND_FETCH_16:
6356 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6357 target = expand_builtin_sync_operation (mode, exp, MINUS,
6358 true, target, ignore);
6363 case BUILT_IN_OR_AND_FETCH_1:
6364 case BUILT_IN_OR_AND_FETCH_2:
6365 case BUILT_IN_OR_AND_FETCH_4:
6366 case BUILT_IN_OR_AND_FETCH_8:
6367 case BUILT_IN_OR_AND_FETCH_16:
6368 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6369 target = expand_builtin_sync_operation (mode, exp, IOR,
6370 true, target, ignore);
6375 case BUILT_IN_AND_AND_FETCH_1:
6376 case BUILT_IN_AND_AND_FETCH_2:
6377 case BUILT_IN_AND_AND_FETCH_4:
6378 case BUILT_IN_AND_AND_FETCH_8:
6379 case BUILT_IN_AND_AND_FETCH_16:
6380 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6381 target = expand_builtin_sync_operation (mode, exp, AND,
6382 true, target, ignore);
6387 case BUILT_IN_XOR_AND_FETCH_1:
6388 case BUILT_IN_XOR_AND_FETCH_2:
6389 case BUILT_IN_XOR_AND_FETCH_4:
6390 case BUILT_IN_XOR_AND_FETCH_8:
6391 case BUILT_IN_XOR_AND_FETCH_16:
6392 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6393 target = expand_builtin_sync_operation (mode, exp, XOR,
6394 true, target, ignore);
6399 case BUILT_IN_NAND_AND_FETCH_1:
6400 case BUILT_IN_NAND_AND_FETCH_2:
6401 case BUILT_IN_NAND_AND_FETCH_4:
6402 case BUILT_IN_NAND_AND_FETCH_8:
6403 case BUILT_IN_NAND_AND_FETCH_16:
6404 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6405 target = expand_builtin_sync_operation (mode, exp, NOT,
6406 true, target, ignore);
6411 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6412 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6413 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6414 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6415 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6416 if (mode == VOIDmode)
6417 mode = TYPE_MODE (boolean_type_node);
6418 if (!target || !register_operand (target, mode))
6419 target = gen_reg_rtx (mode);
6421 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6422 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6427 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6428 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6429 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6430 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6431 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6432 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6433 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6438 case BUILT_IN_LOCK_TEST_AND_SET_1:
6439 case BUILT_IN_LOCK_TEST_AND_SET_2:
6440 case BUILT_IN_LOCK_TEST_AND_SET_4:
6441 case BUILT_IN_LOCK_TEST_AND_SET_8:
6442 case BUILT_IN_LOCK_TEST_AND_SET_16:
6443 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6444 target = expand_builtin_lock_test_and_set (mode, exp, target);
6449 case BUILT_IN_LOCK_RELEASE_1:
6450 case BUILT_IN_LOCK_RELEASE_2:
6451 case BUILT_IN_LOCK_RELEASE_4:
6452 case BUILT_IN_LOCK_RELEASE_8:
6453 case BUILT_IN_LOCK_RELEASE_16:
6454 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6455 expand_builtin_lock_release (mode, exp);
6458 case BUILT_IN_SYNCHRONIZE:
6459 expand_builtin_synchronize ();
6462 case BUILT_IN_OBJECT_SIZE:
6463 return expand_builtin_object_size (exp);
6465 case BUILT_IN_MEMCPY_CHK:
6466 case BUILT_IN_MEMPCPY_CHK:
6467 case BUILT_IN_MEMMOVE_CHK:
6468 case BUILT_IN_MEMSET_CHK:
6469 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6474 case BUILT_IN_STRCPY_CHK:
6475 case BUILT_IN_STPCPY_CHK:
6476 case BUILT_IN_STRNCPY_CHK:
6477 case BUILT_IN_STRCAT_CHK:
6478 case BUILT_IN_STRNCAT_CHK:
6479 case BUILT_IN_SNPRINTF_CHK:
6480 case BUILT_IN_VSNPRINTF_CHK:
6481 maybe_emit_chk_warning (exp, fcode);
6484 case BUILT_IN_SPRINTF_CHK:
6485 case BUILT_IN_VSPRINTF_CHK:
6486 maybe_emit_sprintf_chk_warning (exp, fcode);
6490 maybe_emit_free_warning (exp);
6493 default: /* just do library call, if unknown builtin */
6497 /* The switch statement above can drop through to cause the function
6498 to be called normally. */
6499 return expand_call (exp, target, ignore);
6502 /* Determine whether a tree node represents a call to a built-in
6503 function. If the tree T is a call to a built-in function with
6504 the right number of arguments of the appropriate types, return
6505 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6506 Otherwise the return value is END_BUILTINS. */
6508 enum built_in_function
6509 builtin_mathfn_code (const_tree t)
6511 const_tree fndecl, arg, parmlist;
6512 const_tree argtype, parmtype;
6513 const_call_expr_arg_iterator iter;
6515 if (TREE_CODE (t) != CALL_EXPR
6516 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6517 return END_BUILTINS;
6519 fndecl = get_callee_fndecl (t);
6520 if (fndecl == NULL_TREE
6521 || TREE_CODE (fndecl) != FUNCTION_DECL
6522 || ! DECL_BUILT_IN (fndecl)
6523 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6524 return END_BUILTINS;
6526 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6527 init_const_call_expr_arg_iterator (t, &iter);
6528 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6530 /* If a function doesn't take a variable number of arguments,
6531 the last element in the list will have type `void'. */
6532 parmtype = TREE_VALUE (parmlist);
6533 if (VOID_TYPE_P (parmtype))
6535 if (more_const_call_expr_args_p (&iter))
6536 return END_BUILTINS;
6537 return DECL_FUNCTION_CODE (fndecl);
6540 if (! more_const_call_expr_args_p (&iter))
6541 return END_BUILTINS;
6543 arg = next_const_call_expr_arg (&iter);
6544 argtype = TREE_TYPE (arg);
6546 if (SCALAR_FLOAT_TYPE_P (parmtype))
6548 if (! SCALAR_FLOAT_TYPE_P (argtype))
6549 return END_BUILTINS;
6551 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6553 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6554 return END_BUILTINS;
6556 else if (POINTER_TYPE_P (parmtype))
6558 if (! POINTER_TYPE_P (argtype))
6559 return END_BUILTINS;
6561 else if (INTEGRAL_TYPE_P (parmtype))
6563 if (! INTEGRAL_TYPE_P (argtype))
6564 return END_BUILTINS;
6567 return END_BUILTINS;
6570 /* Variable-length argument list. */
6571 return DECL_FUNCTION_CODE (fndecl);
6574 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6575 evaluate to a constant. */
6578 fold_builtin_constant_p (tree arg)
6580 /* We return 1 for a numeric type that's known to be a constant
6581 value at compile-time or for an aggregate type that's a
6582 literal constant. */
6585 /* If we know this is a constant, emit the constant of one. */
6586 if (CONSTANT_CLASS_P (arg)
6587 || (TREE_CODE (arg) == CONSTRUCTOR
6588 && TREE_CONSTANT (arg)))
6589 return integer_one_node;
6590 if (TREE_CODE (arg) == ADDR_EXPR)
6592 tree op = TREE_OPERAND (arg, 0);
6593 if (TREE_CODE (op) == STRING_CST
6594 || (TREE_CODE (op) == ARRAY_REF
6595 && integer_zerop (TREE_OPERAND (op, 1))
6596 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6597 return integer_one_node;
6600 /* If this expression has side effects, show we don't know it to be a
6601 constant. Likewise if it's a pointer or aggregate type since in
6602 those case we only want literals, since those are only optimized
6603 when generating RTL, not later.
6604 And finally, if we are compiling an initializer, not code, we
6605 need to return a definite result now; there's not going to be any
6606 more optimization done. */
6607 if (TREE_SIDE_EFFECTS (arg)
6608 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6609 || POINTER_TYPE_P (TREE_TYPE (arg))
6611 || folding_initializer)
6612 return integer_zero_node;
6617 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6618 return it as a truthvalue. */
6621 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6623 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6625 fn = built_in_decls[BUILT_IN_EXPECT];
6626 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6627 ret_type = TREE_TYPE (TREE_TYPE (fn));
6628 pred_type = TREE_VALUE (arg_types);
6629 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6631 pred = fold_convert_loc (loc, pred_type, pred);
6632 expected = fold_convert_loc (loc, expected_type, expected);
6633 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6635 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6636 build_int_cst (ret_type, 0));
6639 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6640 NULL_TREE if no simplification is possible. */
6643 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6646 enum tree_code code;
6648 /* If this is a builtin_expect within a builtin_expect keep the
6649 inner one. See through a comparison against a constant. It
6650 might have been added to create a thruthvalue. */
6652 if (COMPARISON_CLASS_P (inner)
6653 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6654 inner = TREE_OPERAND (inner, 0);
6656 if (TREE_CODE (inner) == CALL_EXPR
6657 && (fndecl = get_callee_fndecl (inner))
6658 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6659 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6662 /* Distribute the expected value over short-circuiting operators.
6663 See through the cast from truthvalue_type_node to long. */
6665 while (TREE_CODE (inner) == NOP_EXPR
6666 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6667 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6668 inner = TREE_OPERAND (inner, 0);
6670 code = TREE_CODE (inner);
6671 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6673 tree op0 = TREE_OPERAND (inner, 0);
6674 tree op1 = TREE_OPERAND (inner, 1);
6676 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6677 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6678 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6680 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6683 /* If the argument isn't invariant then there's nothing else we can do. */
6684 if (!TREE_CONSTANT (arg0))
6687 /* If we expect that a comparison against the argument will fold to
6688 a constant return the constant. In practice, this means a true
6689 constant or the address of a non-weak symbol. */
6692 if (TREE_CODE (inner) == ADDR_EXPR)
6696 inner = TREE_OPERAND (inner, 0);
6698 while (TREE_CODE (inner) == COMPONENT_REF
6699 || TREE_CODE (inner) == ARRAY_REF);
6700 if ((TREE_CODE (inner) == VAR_DECL
6701 || TREE_CODE (inner) == FUNCTION_DECL)
6702 && DECL_WEAK (inner))
6706 /* Otherwise, ARG0 already has the proper type for the return value. */
6710 /* Fold a call to __builtin_classify_type with argument ARG. */
6713 fold_builtin_classify_type (tree arg)
6716 return build_int_cst (NULL_TREE, no_type_class);
6718 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6721 /* Fold a call to __builtin_strlen with argument ARG. */
6724 fold_builtin_strlen (location_t loc, tree type, tree arg)
6726 if (!validate_arg (arg, POINTER_TYPE))
6730 tree len = c_strlen (arg, 0);
6733 return fold_convert_loc (loc, type, len);
6739 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6742 fold_builtin_inf (location_t loc, tree type, int warn)
6744 REAL_VALUE_TYPE real;
6746 /* __builtin_inff is intended to be usable to define INFINITY on all
6747 targets. If an infinity is not available, INFINITY expands "to a
6748 positive constant of type float that overflows at translation
6749 time", footnote "In this case, using INFINITY will violate the
6750 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6751 Thus we pedwarn to ensure this constraint violation is
6753 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6754 pedwarn (loc, 0, "target format does not support infinity");
6757 return build_real (type, real);
6760 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6763 fold_builtin_nan (tree arg, tree type, int quiet)
6765 REAL_VALUE_TYPE real;
6768 if (!validate_arg (arg, POINTER_TYPE))
6770 str = c_getstr (arg);
6774 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6777 return build_real (type, real);
6780 /* Return true if the floating point expression T has an integer value.
6781 We also allow +Inf, -Inf and NaN to be considered integer values. */
6784 integer_valued_real_p (tree t)
6786 switch (TREE_CODE (t))
6793 return integer_valued_real_p (TREE_OPERAND (t, 0));
6798 return integer_valued_real_p (TREE_OPERAND (t, 1));
6805 return integer_valued_real_p (TREE_OPERAND (t, 0))
6806 && integer_valued_real_p (TREE_OPERAND (t, 1));
6809 return integer_valued_real_p (TREE_OPERAND (t, 1))
6810 && integer_valued_real_p (TREE_OPERAND (t, 2));
6813 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6817 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6818 if (TREE_CODE (type) == INTEGER_TYPE)
6820 if (TREE_CODE (type) == REAL_TYPE)
6821 return integer_valued_real_p (TREE_OPERAND (t, 0));
6826 switch (builtin_mathfn_code (t))
6828 CASE_FLT_FN (BUILT_IN_CEIL):
6829 CASE_FLT_FN (BUILT_IN_FLOOR):
6830 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6831 CASE_FLT_FN (BUILT_IN_RINT):
6832 CASE_FLT_FN (BUILT_IN_ROUND):
6833 CASE_FLT_FN (BUILT_IN_TRUNC):
6836 CASE_FLT_FN (BUILT_IN_FMIN):
6837 CASE_FLT_FN (BUILT_IN_FMAX):
6838 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6839 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6852 /* FNDECL is assumed to be a builtin where truncation can be propagated
6853 across (for instance floor((double)f) == (double)floorf (f).
6854 Do the transformation for a call with argument ARG. */
6857 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6859 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6861 if (!validate_arg (arg, REAL_TYPE))
6864 /* Integer rounding functions are idempotent. */
6865 if (fcode == builtin_mathfn_code (arg))
6868 /* If argument is already integer valued, and we don't need to worry
6869 about setting errno, there's no need to perform rounding. */
6870 if (! flag_errno_math && integer_valued_real_p (arg))
6875 tree arg0 = strip_float_extensions (arg);
6876 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6877 tree newtype = TREE_TYPE (arg0);
6880 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6881 && (decl = mathfn_built_in (newtype, fcode)))
6882 return fold_convert_loc (loc, ftype,
6883 build_call_expr_loc (loc, decl, 1,
6884 fold_convert_loc (loc,
6891 /* FNDECL is assumed to be builtin which can narrow the FP type of
6892 the argument, for instance lround((double)f) -> lroundf (f).
6893 Do the transformation for a call with argument ARG. */
6896 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6898 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6900 if (!validate_arg (arg, REAL_TYPE))
6903 /* If argument is already integer valued, and we don't need to worry
6904 about setting errno, there's no need to perform rounding. */
6905 if (! flag_errno_math && integer_valued_real_p (arg))
6906 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6907 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6911 tree ftype = TREE_TYPE (arg);
6912 tree arg0 = strip_float_extensions (arg);
6913 tree newtype = TREE_TYPE (arg0);
6916 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6917 && (decl = mathfn_built_in (newtype, fcode)))
6918 return build_call_expr_loc (loc, decl, 1,
6919 fold_convert_loc (loc, newtype, arg0));
6922 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6923 sizeof (long long) == sizeof (long). */
6924 if (TYPE_PRECISION (long_long_integer_type_node)
6925 == TYPE_PRECISION (long_integer_type_node))
6927 tree newfn = NULL_TREE;
6930 CASE_FLT_FN (BUILT_IN_LLCEIL):
6931 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6934 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6935 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6938 CASE_FLT_FN (BUILT_IN_LLROUND):
6939 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6942 CASE_FLT_FN (BUILT_IN_LLRINT):
6943 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6952 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6953 return fold_convert_loc (loc,
6954 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6961 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6962 return type. Return NULL_TREE if no simplification can be made. */
6965 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6969 if (!validate_arg (arg, COMPLEX_TYPE)
6970 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6973 /* Calculate the result when the argument is a constant. */
6974 if (TREE_CODE (arg) == COMPLEX_CST
6975 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6979 if (TREE_CODE (arg) == COMPLEX_EXPR)
6981 tree real = TREE_OPERAND (arg, 0);
6982 tree imag = TREE_OPERAND (arg, 1);
6984 /* If either part is zero, cabs is fabs of the other. */
6985 if (real_zerop (real))
6986 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6987 if (real_zerop (imag))
6988 return fold_build1_loc (loc, ABS_EXPR, type, real);
6990 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6991 if (flag_unsafe_math_optimizations
6992 && operand_equal_p (real, imag, OEP_PURE_SAME))
6994 const REAL_VALUE_TYPE sqrt2_trunc
6995 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6997 return fold_build2_loc (loc, MULT_EXPR, type,
6998 fold_build1_loc (loc, ABS_EXPR, type, real),
6999 build_real (type, sqrt2_trunc));
7003 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7004 if (TREE_CODE (arg) == NEGATE_EXPR
7005 || TREE_CODE (arg) == CONJ_EXPR)
7006 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7008 /* Don't do this when optimizing for size. */
7009 if (flag_unsafe_math_optimizations
7010 && optimize && optimize_function_for_speed_p (cfun))
7012 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7014 if (sqrtfn != NULL_TREE)
7016 tree rpart, ipart, result;
7018 arg = builtin_save_expr (arg);
7020 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7021 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7023 rpart = builtin_save_expr (rpart);
7024 ipart = builtin_save_expr (ipart);
7026 result = fold_build2_loc (loc, PLUS_EXPR, type,
7027 fold_build2_loc (loc, MULT_EXPR, type,
7029 fold_build2_loc (loc, MULT_EXPR, type,
7032 return build_call_expr_loc (loc, sqrtfn, 1, result);
7039 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7040 complex tree type of the result. If NEG is true, the imaginary
7041 zero is negative. */
7044 build_complex_cproj (tree type, bool neg)
7046 REAL_VALUE_TYPE rinf, rzero = dconst0;
7050 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7051 build_real (TREE_TYPE (type), rzero));
7054 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7055 return type. Return NULL_TREE if no simplification can be made. */
7058 fold_builtin_cproj (location_t loc, tree arg, tree type)
7060 if (!validate_arg (arg, COMPLEX_TYPE)
7061 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7064 /* If there are no infinities, return arg. */
7065 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7066 return non_lvalue_loc (loc, arg);
7068 /* Calculate the result when the argument is a constant. */
7069 if (TREE_CODE (arg) == COMPLEX_CST)
7071 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7072 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7074 if (real_isinf (real) || real_isinf (imag))
7075 return build_complex_cproj (type, imag->sign);
7079 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7081 tree real = TREE_OPERAND (arg, 0);
7082 tree imag = TREE_OPERAND (arg, 1);
7087 /* If the real part is inf and the imag part is known to be
7088 nonnegative, return (inf + 0i). Remember side-effects are
7089 possible in the imag part. */
7090 if (TREE_CODE (real) == REAL_CST
7091 && real_isinf (TREE_REAL_CST_PTR (real))
7092 && tree_expr_nonnegative_p (imag))
7093 return omit_one_operand_loc (loc, type,
7094 build_complex_cproj (type, false),
7097 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7098 Remember side-effects are possible in the real part. */
7099 if (TREE_CODE (imag) == REAL_CST
7100 && real_isinf (TREE_REAL_CST_PTR (imag)))
7102 omit_one_operand_loc (loc, type,
7103 build_complex_cproj (type, TREE_REAL_CST_PTR
7104 (imag)->sign), arg);
7110 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7111 Return NULL_TREE if no simplification can be made. */
7114 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7117 enum built_in_function fcode;
7120 if (!validate_arg (arg, REAL_TYPE))
7123 /* Calculate the result when the argument is a constant. */
7124 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7127 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7128 fcode = builtin_mathfn_code (arg);
7129 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7131 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7132 arg = fold_build2_loc (loc, MULT_EXPR, type,
7133 CALL_EXPR_ARG (arg, 0),
7134 build_real (type, dconsthalf));
7135 return build_call_expr_loc (loc, expfn, 1, arg);
7138 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7139 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7141 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7145 tree arg0 = CALL_EXPR_ARG (arg, 0);
7147 /* The inner root was either sqrt or cbrt. */
7148 /* This was a conditional expression but it triggered a bug
7150 REAL_VALUE_TYPE dconstroot;
7151 if (BUILTIN_SQRT_P (fcode))
7152 dconstroot = dconsthalf;
7154 dconstroot = dconst_third ();
7156 /* Adjust for the outer root. */
7157 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7158 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7159 tree_root = build_real (type, dconstroot);
7160 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7164 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7165 if (flag_unsafe_math_optimizations
7166 && (fcode == BUILT_IN_POW
7167 || fcode == BUILT_IN_POWF
7168 || fcode == BUILT_IN_POWL))
7170 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7171 tree arg0 = CALL_EXPR_ARG (arg, 0);
7172 tree arg1 = CALL_EXPR_ARG (arg, 1);
7174 if (!tree_expr_nonnegative_p (arg0))
7175 arg0 = build1 (ABS_EXPR, type, arg0);
7176 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7177 build_real (type, dconsthalf));
7178 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7184 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7185 Return NULL_TREE if no simplification can be made. */
7188 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7190 const enum built_in_function fcode = builtin_mathfn_code (arg);
7193 if (!validate_arg (arg, REAL_TYPE))
7196 /* Calculate the result when the argument is a constant. */
7197 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7200 if (flag_unsafe_math_optimizations)
7202 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7203 if (BUILTIN_EXPONENT_P (fcode))
7205 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7206 const REAL_VALUE_TYPE third_trunc =
7207 real_value_truncate (TYPE_MODE (type), dconst_third ());
7208 arg = fold_build2_loc (loc, MULT_EXPR, type,
7209 CALL_EXPR_ARG (arg, 0),
7210 build_real (type, third_trunc));
7211 return build_call_expr_loc (loc, expfn, 1, arg);
7214 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7215 if (BUILTIN_SQRT_P (fcode))
7217 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7221 tree arg0 = CALL_EXPR_ARG (arg, 0);
7223 REAL_VALUE_TYPE dconstroot = dconst_third ();
7225 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7226 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7227 tree_root = build_real (type, dconstroot);
7228 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7232 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7233 if (BUILTIN_CBRT_P (fcode))
7235 tree arg0 = CALL_EXPR_ARG (arg, 0);
7236 if (tree_expr_nonnegative_p (arg0))
7238 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7243 REAL_VALUE_TYPE dconstroot;
7245 real_arithmetic (&dconstroot, MULT_EXPR,
7246 dconst_third_ptr (), dconst_third_ptr ());
7247 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7248 tree_root = build_real (type, dconstroot);
7249 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7254 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7255 if (fcode == BUILT_IN_POW
7256 || fcode == BUILT_IN_POWF
7257 || fcode == BUILT_IN_POWL)
7259 tree arg00 = CALL_EXPR_ARG (arg, 0);
7260 tree arg01 = CALL_EXPR_ARG (arg, 1);
7261 if (tree_expr_nonnegative_p (arg00))
7263 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7264 const REAL_VALUE_TYPE dconstroot
7265 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7266 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7267 build_real (type, dconstroot));
7268 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7275 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7276 TYPE is the type of the return value. Return NULL_TREE if no
7277 simplification can be made. */
7280 fold_builtin_cos (location_t loc,
7281 tree arg, tree type, tree fndecl)
7285 if (!validate_arg (arg, REAL_TYPE))
7288 /* Calculate the result when the argument is a constant. */
7289 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7292 /* Optimize cos(-x) into cos (x). */
7293 if ((narg = fold_strip_sign_ops (arg)))
7294 return build_call_expr_loc (loc, fndecl, 1, narg);
7299 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7300 Return NULL_TREE if no simplification can be made. */
7303 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7305 if (validate_arg (arg, REAL_TYPE))
7309 /* Calculate the result when the argument is a constant. */
7310 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7313 /* Optimize cosh(-x) into cosh (x). */
7314 if ((narg = fold_strip_sign_ops (arg)))
7315 return build_call_expr_loc (loc, fndecl, 1, narg);
7321 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7322 argument ARG. TYPE is the type of the return value. Return
7323 NULL_TREE if no simplification can be made. */
7326 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7329 if (validate_arg (arg, COMPLEX_TYPE)
7330 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7334 /* Calculate the result when the argument is a constant. */
7335 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7338 /* Optimize fn(-x) into fn(x). */
7339 if ((tmp = fold_strip_sign_ops (arg)))
7340 return build_call_expr_loc (loc, fndecl, 1, tmp);
7346 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7347 Return NULL_TREE if no simplification can be made. */
7350 fold_builtin_tan (tree arg, tree type)
7352 enum built_in_function fcode;
7355 if (!validate_arg (arg, REAL_TYPE))
7358 /* Calculate the result when the argument is a constant. */
7359 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7362 /* Optimize tan(atan(x)) = x. */
7363 fcode = builtin_mathfn_code (arg);
7364 if (flag_unsafe_math_optimizations
7365 && (fcode == BUILT_IN_ATAN
7366 || fcode == BUILT_IN_ATANF
7367 || fcode == BUILT_IN_ATANL))
7368 return CALL_EXPR_ARG (arg, 0);
7373 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7374 NULL_TREE if no simplification can be made. */
7377 fold_builtin_sincos (location_t loc,
7378 tree arg0, tree arg1, tree arg2)
7383 if (!validate_arg (arg0, REAL_TYPE)
7384 || !validate_arg (arg1, POINTER_TYPE)
7385 || !validate_arg (arg2, POINTER_TYPE))
7388 type = TREE_TYPE (arg0);
7390 /* Calculate the result when the argument is a constant. */
7391 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7394 /* Canonicalize sincos to cexpi. */
7395 if (!TARGET_C99_FUNCTIONS)
7397 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7401 call = build_call_expr_loc (loc, fn, 1, arg0);
7402 call = builtin_save_expr (call);
7404 return build2 (COMPOUND_EXPR, void_type_node,
7405 build2 (MODIFY_EXPR, void_type_node,
7406 build_fold_indirect_ref_loc (loc, arg1),
7407 build1 (IMAGPART_EXPR, type, call)),
7408 build2 (MODIFY_EXPR, void_type_node,
7409 build_fold_indirect_ref_loc (loc, arg2),
7410 build1 (REALPART_EXPR, type, call)));
7413 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7414 NULL_TREE if no simplification can be made. */
7417 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7420 tree realp, imagp, ifn;
7423 if (!validate_arg (arg0, COMPLEX_TYPE)
7424 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7427 /* Calculate the result when the argument is a constant. */
7428 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7431 rtype = TREE_TYPE (TREE_TYPE (arg0));
7433 /* In case we can figure out the real part of arg0 and it is constant zero
7435 if (!TARGET_C99_FUNCTIONS)
7437 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7441 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7442 && real_zerop (realp))
7444 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7445 return build_call_expr_loc (loc, ifn, 1, narg);
7448 /* In case we can easily decompose real and imaginary parts split cexp
7449 to exp (r) * cexpi (i). */
7450 if (flag_unsafe_math_optimizations
7453 tree rfn, rcall, icall;
7455 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7459 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7463 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7464 icall = builtin_save_expr (icall);
7465 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7466 rcall = builtin_save_expr (rcall);
7467 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7468 fold_build2_loc (loc, MULT_EXPR, rtype,
7470 fold_build1_loc (loc, REALPART_EXPR,
7472 fold_build2_loc (loc, MULT_EXPR, rtype,
7474 fold_build1_loc (loc, IMAGPART_EXPR,
7481 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7482 Return NULL_TREE if no simplification can be made. */
7485 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7487 if (!validate_arg (arg, REAL_TYPE))
7490 /* Optimize trunc of constant value. */
7491 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7493 REAL_VALUE_TYPE r, x;
7494 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7496 x = TREE_REAL_CST (arg);
7497 real_trunc (&r, TYPE_MODE (type), &x);
7498 return build_real (type, r);
7501 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7504 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7505 Return NULL_TREE if no simplification can be made. */
7508 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7510 if (!validate_arg (arg, REAL_TYPE))
7513 /* Optimize floor of constant value. */
7514 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7518 x = TREE_REAL_CST (arg);
7519 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7521 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7524 real_floor (&r, TYPE_MODE (type), &x);
7525 return build_real (type, r);
7529 /* Fold floor (x) where x is nonnegative to trunc (x). */
7530 if (tree_expr_nonnegative_p (arg))
7532 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7534 return build_call_expr_loc (loc, truncfn, 1, arg);
7537 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7540 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7541 Return NULL_TREE if no simplification can be made. */
7544 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7546 if (!validate_arg (arg, REAL_TYPE))
7549 /* Optimize ceil of constant value. */
7550 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7554 x = TREE_REAL_CST (arg);
7555 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7557 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7560 real_ceil (&r, TYPE_MODE (type), &x);
7561 return build_real (type, r);
7565 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7568 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7569 Return NULL_TREE if no simplification can be made. */
7572 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7574 if (!validate_arg (arg, REAL_TYPE))
7577 /* Optimize round of constant value. */
7578 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7582 x = TREE_REAL_CST (arg);
7583 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7585 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7588 real_round (&r, TYPE_MODE (type), &x);
7589 return build_real (type, r);
7593 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7596 /* Fold function call to builtin lround, lroundf or lroundl (or the
7597 corresponding long long versions) and other rounding functions. ARG
7598 is the argument to the call. Return NULL_TREE if no simplification
7602 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7604 if (!validate_arg (arg, REAL_TYPE))
7607 /* Optimize lround of constant value. */
7608 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7610 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7612 if (real_isfinite (&x))
7614 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7615 tree ftype = TREE_TYPE (arg);
7616 unsigned HOST_WIDE_INT lo2;
7617 HOST_WIDE_INT hi, lo;
7620 switch (DECL_FUNCTION_CODE (fndecl))
7622 CASE_FLT_FN (BUILT_IN_LFLOOR):
7623 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7624 real_floor (&r, TYPE_MODE (ftype), &x);
7627 CASE_FLT_FN (BUILT_IN_LCEIL):
7628 CASE_FLT_FN (BUILT_IN_LLCEIL):
7629 real_ceil (&r, TYPE_MODE (ftype), &x);
7632 CASE_FLT_FN (BUILT_IN_LROUND):
7633 CASE_FLT_FN (BUILT_IN_LLROUND):
7634 real_round (&r, TYPE_MODE (ftype), &x);
7641 REAL_VALUE_TO_INT (&lo, &hi, r);
7642 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7643 return build_int_cst_wide (itype, lo2, hi);
7647 switch (DECL_FUNCTION_CODE (fndecl))
7649 CASE_FLT_FN (BUILT_IN_LFLOOR):
7650 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7651 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7652 if (tree_expr_nonnegative_p (arg))
7653 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7654 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7659 return fold_fixed_mathfn (loc, fndecl, arg);
7662 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7663 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7664 the argument to the call. Return NULL_TREE if no simplification can
7668 fold_builtin_bitop (tree fndecl, tree arg)
7670 if (!validate_arg (arg, INTEGER_TYPE))
7673 /* Optimize for constant argument. */
7674 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7676 HOST_WIDE_INT hi, width, result;
7677 unsigned HOST_WIDE_INT lo;
7680 type = TREE_TYPE (arg);
7681 width = TYPE_PRECISION (type);
7682 lo = TREE_INT_CST_LOW (arg);
7684 /* Clear all the bits that are beyond the type's precision. */
7685 if (width > HOST_BITS_PER_WIDE_INT)
7687 hi = TREE_INT_CST_HIGH (arg);
7688 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7689 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7694 if (width < HOST_BITS_PER_WIDE_INT)
7695 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7698 switch (DECL_FUNCTION_CODE (fndecl))
7700 CASE_INT_FN (BUILT_IN_FFS):
7702 result = exact_log2 (lo & -lo) + 1;
7704 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7709 CASE_INT_FN (BUILT_IN_CLZ):
7711 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7713 result = width - floor_log2 (lo) - 1;
7714 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7718 CASE_INT_FN (BUILT_IN_CTZ):
7720 result = exact_log2 (lo & -lo);
7722 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7723 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7727 CASE_INT_FN (BUILT_IN_POPCOUNT):
7730 result++, lo &= lo - 1;
7732 result++, hi &= hi - 1;
7735 CASE_INT_FN (BUILT_IN_PARITY):
7738 result++, lo &= lo - 1;
7740 result++, hi &= hi - 1;
7748 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7754 /* Fold function call to builtin_bswap and the long and long long
7755 variants. Return NULL_TREE if no simplification can be made. */
7757 fold_builtin_bswap (tree fndecl, tree arg)
7759 if (! validate_arg (arg, INTEGER_TYPE))
7762 /* Optimize constant value. */
7763 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7765 HOST_WIDE_INT hi, width, r_hi = 0;
7766 unsigned HOST_WIDE_INT lo, r_lo = 0;
7769 type = TREE_TYPE (arg);
7770 width = TYPE_PRECISION (type);
7771 lo = TREE_INT_CST_LOW (arg);
7772 hi = TREE_INT_CST_HIGH (arg);
7774 switch (DECL_FUNCTION_CODE (fndecl))
7776 case BUILT_IN_BSWAP32:
7777 case BUILT_IN_BSWAP64:
7781 for (s = 0; s < width; s += 8)
7783 int d = width - s - 8;
7784 unsigned HOST_WIDE_INT byte;
7786 if (s < HOST_BITS_PER_WIDE_INT)
7787 byte = (lo >> s) & 0xff;
7789 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7791 if (d < HOST_BITS_PER_WIDE_INT)
7794 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7804 if (width < HOST_BITS_PER_WIDE_INT)
7805 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7807 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7813 /* A subroutine of fold_builtin to fold the various logarithmic
7814 functions. Return NULL_TREE if no simplification can me made.
7815 FUNC is the corresponding MPFR logarithm function. */
7818 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7819 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7821 if (validate_arg (arg, REAL_TYPE))
7823 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7825 const enum built_in_function fcode = builtin_mathfn_code (arg);
7827 /* Calculate the result when the argument is a constant. */
7828 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7831 /* Special case, optimize logN(expN(x)) = x. */
7832 if (flag_unsafe_math_optimizations
7833 && ((func == mpfr_log
7834 && (fcode == BUILT_IN_EXP
7835 || fcode == BUILT_IN_EXPF
7836 || fcode == BUILT_IN_EXPL))
7837 || (func == mpfr_log2
7838 && (fcode == BUILT_IN_EXP2
7839 || fcode == BUILT_IN_EXP2F
7840 || fcode == BUILT_IN_EXP2L))
7841 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7842 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7844 /* Optimize logN(func()) for various exponential functions. We
7845 want to determine the value "x" and the power "exponent" in
7846 order to transform logN(x**exponent) into exponent*logN(x). */
7847 if (flag_unsafe_math_optimizations)
7849 tree exponent = 0, x = 0;
7853 CASE_FLT_FN (BUILT_IN_EXP):
7854 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7855 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7857 exponent = CALL_EXPR_ARG (arg, 0);
7859 CASE_FLT_FN (BUILT_IN_EXP2):
7860 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7861 x = build_real (type, dconst2);
7862 exponent = CALL_EXPR_ARG (arg, 0);
7864 CASE_FLT_FN (BUILT_IN_EXP10):
7865 CASE_FLT_FN (BUILT_IN_POW10):
7866 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7868 REAL_VALUE_TYPE dconst10;
7869 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7870 x = build_real (type, dconst10);
7872 exponent = CALL_EXPR_ARG (arg, 0);
7874 CASE_FLT_FN (BUILT_IN_SQRT):
7875 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7876 x = CALL_EXPR_ARG (arg, 0);
7877 exponent = build_real (type, dconsthalf);
7879 CASE_FLT_FN (BUILT_IN_CBRT):
7880 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7881 x = CALL_EXPR_ARG (arg, 0);
7882 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7885 CASE_FLT_FN (BUILT_IN_POW):
7886 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7887 x = CALL_EXPR_ARG (arg, 0);
7888 exponent = CALL_EXPR_ARG (arg, 1);
7894 /* Now perform the optimization. */
7897 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7898 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7906 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7907 NULL_TREE if no simplification can be made. */
7910 fold_builtin_hypot (location_t loc, tree fndecl,
7911 tree arg0, tree arg1, tree type)
7913 tree res, narg0, narg1;
7915 if (!validate_arg (arg0, REAL_TYPE)
7916 || !validate_arg (arg1, REAL_TYPE))
7919 /* Calculate the result when the argument is a constant. */
7920 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7923 /* If either argument to hypot has a negate or abs, strip that off.
7924 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7925 narg0 = fold_strip_sign_ops (arg0);
7926 narg1 = fold_strip_sign_ops (arg1);
7929 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7930 narg1 ? narg1 : arg1);
7933 /* If either argument is zero, hypot is fabs of the other. */
7934 if (real_zerop (arg0))
7935 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7936 else if (real_zerop (arg1))
7937 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7939 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7940 if (flag_unsafe_math_optimizations
7941 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7943 const REAL_VALUE_TYPE sqrt2_trunc
7944 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7945 return fold_build2_loc (loc, MULT_EXPR, type,
7946 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7947 build_real (type, sqrt2_trunc));
7954 /* Fold a builtin function call to pow, powf, or powl. Return
7955 NULL_TREE if no simplification can be made. */
7957 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7961 if (!validate_arg (arg0, REAL_TYPE)
7962 || !validate_arg (arg1, REAL_TYPE))
7965 /* Calculate the result when the argument is a constant. */
7966 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7969 /* Optimize pow(1.0,y) = 1.0. */
7970 if (real_onep (arg0))
7971 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7973 if (TREE_CODE (arg1) == REAL_CST
7974 && !TREE_OVERFLOW (arg1))
7976 REAL_VALUE_TYPE cint;
7980 c = TREE_REAL_CST (arg1);
7982 /* Optimize pow(x,0.0) = 1.0. */
7983 if (REAL_VALUES_EQUAL (c, dconst0))
7984 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7987 /* Optimize pow(x,1.0) = x. */
7988 if (REAL_VALUES_EQUAL (c, dconst1))
7991 /* Optimize pow(x,-1.0) = 1.0/x. */
7992 if (REAL_VALUES_EQUAL (c, dconstm1))
7993 return fold_build2_loc (loc, RDIV_EXPR, type,
7994 build_real (type, dconst1), arg0);
7996 /* Optimize pow(x,0.5) = sqrt(x). */
7997 if (flag_unsafe_math_optimizations
7998 && REAL_VALUES_EQUAL (c, dconsthalf))
8000 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8002 if (sqrtfn != NULL_TREE)
8003 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8006 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8007 if (flag_unsafe_math_optimizations)
8009 const REAL_VALUE_TYPE dconstroot
8010 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8012 if (REAL_VALUES_EQUAL (c, dconstroot))
8014 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8015 if (cbrtfn != NULL_TREE)
8016 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8020 /* Check for an integer exponent. */
8021 n = real_to_integer (&c);
8022 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8023 if (real_identical (&c, &cint))
8025 /* Attempt to evaluate pow at compile-time, unless this should
8026 raise an exception. */
8027 if (TREE_CODE (arg0) == REAL_CST
8028 && !TREE_OVERFLOW (arg0)
8030 || (!flag_trapping_math && !flag_errno_math)
8031 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8036 x = TREE_REAL_CST (arg0);
8037 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8038 if (flag_unsafe_math_optimizations || !inexact)
8039 return build_real (type, x);
8042 /* Strip sign ops from even integer powers. */
8043 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8045 tree narg0 = fold_strip_sign_ops (arg0);
8047 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8052 if (flag_unsafe_math_optimizations)
8054 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8056 /* Optimize pow(expN(x),y) = expN(x*y). */
8057 if (BUILTIN_EXPONENT_P (fcode))
8059 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8060 tree arg = CALL_EXPR_ARG (arg0, 0);
8061 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8062 return build_call_expr_loc (loc, expfn, 1, arg);
8065 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8066 if (BUILTIN_SQRT_P (fcode))
8068 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8069 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8070 build_real (type, dconsthalf));
8071 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8074 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8075 if (BUILTIN_CBRT_P (fcode))
8077 tree arg = CALL_EXPR_ARG (arg0, 0);
8078 if (tree_expr_nonnegative_p (arg))
8080 const REAL_VALUE_TYPE dconstroot
8081 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8082 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8083 build_real (type, dconstroot));
8084 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8088 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8089 if (fcode == BUILT_IN_POW
8090 || fcode == BUILT_IN_POWF
8091 || fcode == BUILT_IN_POWL)
8093 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8094 if (tree_expr_nonnegative_p (arg00))
8096 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8097 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8098 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8106 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8107 Return NULL_TREE if no simplification can be made. */
8109 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8110 tree arg0, tree arg1, tree type)
8112 if (!validate_arg (arg0, REAL_TYPE)
8113 || !validate_arg (arg1, INTEGER_TYPE))
8116 /* Optimize pow(1.0,y) = 1.0. */
8117 if (real_onep (arg0))
8118 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8120 if (host_integerp (arg1, 0))
8122 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8124 /* Evaluate powi at compile-time. */
8125 if (TREE_CODE (arg0) == REAL_CST
8126 && !TREE_OVERFLOW (arg0))
8129 x = TREE_REAL_CST (arg0);
8130 real_powi (&x, TYPE_MODE (type), &x, c);
8131 return build_real (type, x);
8134 /* Optimize pow(x,0) = 1.0. */
8136 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8139 /* Optimize pow(x,1) = x. */
8143 /* Optimize pow(x,-1) = 1.0/x. */
8145 return fold_build2_loc (loc, RDIV_EXPR, type,
8146 build_real (type, dconst1), arg0);
8152 /* A subroutine of fold_builtin to fold the various exponent
8153 functions. Return NULL_TREE if no simplification can be made.
8154 FUNC is the corresponding MPFR exponent function. */
8157 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8158 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8160 if (validate_arg (arg, REAL_TYPE))
8162 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8165 /* Calculate the result when the argument is a constant. */
8166 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8169 /* Optimize expN(logN(x)) = x. */
8170 if (flag_unsafe_math_optimizations)
8172 const enum built_in_function fcode = builtin_mathfn_code (arg);
8174 if ((func == mpfr_exp
8175 && (fcode == BUILT_IN_LOG
8176 || fcode == BUILT_IN_LOGF
8177 || fcode == BUILT_IN_LOGL))
8178 || (func == mpfr_exp2
8179 && (fcode == BUILT_IN_LOG2
8180 || fcode == BUILT_IN_LOG2F
8181 || fcode == BUILT_IN_LOG2L))
8182 || (func == mpfr_exp10
8183 && (fcode == BUILT_IN_LOG10
8184 || fcode == BUILT_IN_LOG10F
8185 || fcode == BUILT_IN_LOG10L)))
8186 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8193 /* Return true if VAR is a VAR_DECL or a component thereof. */
8196 var_decl_component_p (tree var)
8199 while (handled_component_p (inner))
8200 inner = TREE_OPERAND (inner, 0);
8201 return SSA_VAR_P (inner);
8204 /* Fold function call to builtin memset. Return
8205 NULL_TREE if no simplification can be made. */
8208 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8209 tree type, bool ignore)
8211 tree var, ret, etype;
8212 unsigned HOST_WIDE_INT length, cval;
8214 if (! validate_arg (dest, POINTER_TYPE)
8215 || ! validate_arg (c, INTEGER_TYPE)
8216 || ! validate_arg (len, INTEGER_TYPE))
8219 if (! host_integerp (len, 1))
8222 /* If the LEN parameter is zero, return DEST. */
8223 if (integer_zerop (len))
8224 return omit_one_operand_loc (loc, type, dest, c);
8226 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8231 if (TREE_CODE (var) != ADDR_EXPR)
8234 var = TREE_OPERAND (var, 0);
8235 if (TREE_THIS_VOLATILE (var))
8238 etype = TREE_TYPE (var);
8239 if (TREE_CODE (etype) == ARRAY_TYPE)
8240 etype = TREE_TYPE (etype);
8242 if (!INTEGRAL_TYPE_P (etype)
8243 && !POINTER_TYPE_P (etype))
8246 if (! var_decl_component_p (var))
8249 length = tree_low_cst (len, 1);
8250 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8251 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8255 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8258 if (integer_zerop (c))
8262 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8265 cval = tree_low_cst (c, 1);
8269 cval |= (cval << 31) << 1;
8272 ret = build_int_cst_type (etype, cval);
8273 var = build_fold_indirect_ref_loc (loc,
8274 fold_convert_loc (loc,
8275 build_pointer_type (etype),
8277 ret = build2 (MODIFY_EXPR, etype, var, ret);
8281 return omit_one_operand_loc (loc, type, dest, ret);
8284 /* Fold function call to builtin memset. Return
8285 NULL_TREE if no simplification can be made. */
8288 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8290 if (! validate_arg (dest, POINTER_TYPE)
8291 || ! validate_arg (size, INTEGER_TYPE))
8297 /* New argument list transforming bzero(ptr x, int y) to
8298 memset(ptr x, int 0, size_t y). This is done this way
8299 so that if it isn't expanded inline, we fallback to
8300 calling bzero instead of memset. */
8302 return fold_builtin_memset (loc, dest, integer_zero_node,
8303 fold_convert_loc (loc, sizetype, size),
8304 void_type_node, ignore);
8307 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8308 NULL_TREE if no simplification can be made.
8309 If ENDP is 0, return DEST (like memcpy).
8310 If ENDP is 1, return DEST+LEN (like mempcpy).
8311 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8312 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8316 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8317 tree len, tree type, bool ignore, int endp)
8319 tree destvar, srcvar, expr;
8321 if (! validate_arg (dest, POINTER_TYPE)
8322 || ! validate_arg (src, POINTER_TYPE)
8323 || ! validate_arg (len, INTEGER_TYPE))
8326 /* If the LEN parameter is zero, return DEST. */
8327 if (integer_zerop (len))
8328 return omit_one_operand_loc (loc, type, dest, src);
8330 /* If SRC and DEST are the same (and not volatile), return
8331 DEST{,+LEN,+LEN-1}. */
8332 if (operand_equal_p (src, dest, 0))
8336 tree srctype, desttype;
8337 int src_align, dest_align;
8341 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8342 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8344 /* Both DEST and SRC must be pointer types.
8345 ??? This is what old code did. Is the testing for pointer types
8348 If either SRC is readonly or length is 1, we can use memcpy. */
8349 if (!dest_align || !src_align)
8351 if (readonly_data_expr (src)
8352 || (host_integerp (len, 1)
8353 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8354 >= tree_low_cst (len, 1))))
8356 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8359 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8362 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8363 srcvar = build_fold_indirect_ref_loc (loc, src);
8364 destvar = build_fold_indirect_ref_loc (loc, dest);
8366 && !TREE_THIS_VOLATILE (srcvar)
8368 && !TREE_THIS_VOLATILE (destvar))
8370 tree src_base, dest_base, fn;
8371 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8372 HOST_WIDE_INT size = -1;
8373 HOST_WIDE_INT maxsize = -1;
8376 if (handled_component_p (src_base))
8377 src_base = get_ref_base_and_extent (src_base, &src_offset,
8379 dest_base = destvar;
8380 if (handled_component_p (dest_base))
8381 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8383 if (host_integerp (len, 1))
8385 maxsize = tree_low_cst (len, 1);
8387 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8390 maxsize *= BITS_PER_UNIT;
8394 if (SSA_VAR_P (src_base)
8395 && SSA_VAR_P (dest_base))
8397 if (operand_equal_p (src_base, dest_base, 0)
8398 && ranges_overlap_p (src_offset, maxsize,
8399 dest_offset, maxsize))
8402 else if (TREE_CODE (src_base) == INDIRECT_REF
8403 && TREE_CODE (dest_base) == INDIRECT_REF)
8405 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8406 TREE_OPERAND (dest_base, 0), 0)
8407 || ranges_overlap_p (src_offset, maxsize,
8408 dest_offset, maxsize))
8414 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8417 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8422 if (!host_integerp (len, 0))
8425 This logic lose for arguments like (type *)malloc (sizeof (type)),
8426 since we strip the casts of up to VOID return value from malloc.
8427 Perhaps we ought to inherit type from non-VOID argument here? */
8430 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8431 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8433 tree tem = TREE_OPERAND (src, 0);
8435 if (tem != TREE_OPERAND (src, 0))
8436 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8438 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8440 tree tem = TREE_OPERAND (dest, 0);
8442 if (tem != TREE_OPERAND (dest, 0))
8443 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8445 srctype = TREE_TYPE (TREE_TYPE (src));
8447 && TREE_CODE (srctype) == ARRAY_TYPE
8448 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8450 srctype = TREE_TYPE (srctype);
8452 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8454 desttype = TREE_TYPE (TREE_TYPE (dest));
8456 && TREE_CODE (desttype) == ARRAY_TYPE
8457 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8459 desttype = TREE_TYPE (desttype);
8461 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8463 if (!srctype || !desttype
8464 || !TYPE_SIZE_UNIT (srctype)
8465 || !TYPE_SIZE_UNIT (desttype)
8466 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8467 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8468 || TYPE_VOLATILE (srctype)
8469 || TYPE_VOLATILE (desttype))
8472 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8473 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8474 if (dest_align < (int) TYPE_ALIGN (desttype)
8475 || src_align < (int) TYPE_ALIGN (srctype))
8479 dest = builtin_save_expr (dest);
8482 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8484 srcvar = build_fold_indirect_ref_loc (loc, src);
8485 if (TREE_THIS_VOLATILE (srcvar))
8487 else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
8489 /* With memcpy, it is possible to bypass aliasing rules, so without
8490 this check i.e. execute/20060930-2.c would be misoptimized,
8491 because it use conflicting alias set to hold argument for the
8492 memcpy call. This check is probably unnecessary with
8493 -fno-strict-aliasing. Similarly for destvar. See also
8495 else if (!var_decl_component_p (srcvar))
8499 destvar = NULL_TREE;
8500 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8502 destvar = build_fold_indirect_ref_loc (loc, dest);
8503 if (TREE_THIS_VOLATILE (destvar))
8505 else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
8506 destvar = NULL_TREE;
8507 else if (!var_decl_component_p (destvar))
8508 destvar = NULL_TREE;
8511 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8514 if (srcvar == NULL_TREE)
8517 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8520 srctype = build_qualified_type (desttype, 0);
8521 if (src_align < (int) TYPE_ALIGN (srctype))
8523 if (AGGREGATE_TYPE_P (srctype)
8524 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8527 srctype = build_variant_type_copy (srctype);
8528 TYPE_ALIGN (srctype) = src_align;
8529 TYPE_USER_ALIGN (srctype) = 1;
8530 TYPE_PACKED (srctype) = 1;
8532 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8533 src = fold_convert_loc (loc, srcptype, src);
8534 srcvar = build_fold_indirect_ref_loc (loc, src);
8536 else if (destvar == NULL_TREE)
8539 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8542 desttype = build_qualified_type (srctype, 0);
8543 if (dest_align < (int) TYPE_ALIGN (desttype))
8545 if (AGGREGATE_TYPE_P (desttype)
8546 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8549 desttype = build_variant_type_copy (desttype);
8550 TYPE_ALIGN (desttype) = dest_align;
8551 TYPE_USER_ALIGN (desttype) = 1;
8552 TYPE_PACKED (desttype) = 1;
8554 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8555 dest = fold_convert_loc (loc, destptype, dest);
8556 destvar = build_fold_indirect_ref_loc (loc, dest);
8559 if (srctype == desttype
8560 || (gimple_in_ssa_p (cfun)
8561 && useless_type_conversion_p (desttype, srctype)))
8563 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8564 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8565 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8566 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8567 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
8569 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8570 TREE_TYPE (destvar), srcvar);
8571 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8577 if (endp == 0 || endp == 3)
8578 return omit_one_operand_loc (loc, type, dest, expr);
8584 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8587 len = fold_convert_loc (loc, sizetype, len);
8588 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8589 dest = fold_convert_loc (loc, type, dest);
8591 dest = omit_one_operand_loc (loc, type, dest, expr);
8595 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8596 If LEN is not NULL, it represents the length of the string to be
8597 copied. Return NULL_TREE if no simplification can be made. */
8600 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8604 if (!validate_arg (dest, POINTER_TYPE)
8605 || !validate_arg (src, POINTER_TYPE))
8608 /* If SRC and DEST are the same (and not volatile), return DEST. */
8609 if (operand_equal_p (src, dest, 0))
8610 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8612 if (optimize_function_for_size_p (cfun))
8615 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8621 len = c_strlen (src, 1);
8622 if (! len || TREE_SIDE_EFFECTS (len))
8626 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8627 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8628 build_call_expr_loc (loc, fn, 3, dest, src, len));
8631 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8632 Return NULL_TREE if no simplification can be made. */
8635 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8637 tree fn, len, lenp1, call, type;
8639 if (!validate_arg (dest, POINTER_TYPE)
8640 || !validate_arg (src, POINTER_TYPE))
8643 len = c_strlen (src, 1);
8645 || TREE_CODE (len) != INTEGER_CST)
8648 if (optimize_function_for_size_p (cfun)
8649 /* If length is zero it's small enough. */
8650 && !integer_zerop (len))
8653 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8657 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8658 /* We use dest twice in building our expression. Save it from
8659 multiple expansions. */
8660 dest = builtin_save_expr (dest);
8661 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8663 type = TREE_TYPE (TREE_TYPE (fndecl));
8664 len = fold_convert_loc (loc, sizetype, len);
8665 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8666 dest = fold_convert_loc (loc, type, dest);
8667 dest = omit_one_operand_loc (loc, type, dest, call);
8671 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8672 If SLEN is not NULL, it represents the length of the source string.
8673 Return NULL_TREE if no simplification can be made. */
8676 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8677 tree src, tree len, tree slen)
8681 if (!validate_arg (dest, POINTER_TYPE)
8682 || !validate_arg (src, POINTER_TYPE)
8683 || !validate_arg (len, INTEGER_TYPE))
8686 /* If the LEN parameter is zero, return DEST. */
8687 if (integer_zerop (len))
8688 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8690 /* We can't compare slen with len as constants below if len is not a
8692 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8696 slen = c_strlen (src, 1);
8698 /* Now, we must be passed a constant src ptr parameter. */
8699 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8702 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8704 /* We do not support simplification of this case, though we do
8705 support it when expanding trees into RTL. */
8706 /* FIXME: generate a call to __builtin_memset. */
8707 if (tree_int_cst_lt (slen, len))
8710 /* OK transform into builtin memcpy. */
8711 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8714 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8715 build_call_expr_loc (loc, fn, 3, dest, src, len));
8718 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8719 arguments to the call, and TYPE is its return type.
8720 Return NULL_TREE if no simplification can be made. */
8723 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8725 if (!validate_arg (arg1, POINTER_TYPE)
8726 || !validate_arg (arg2, INTEGER_TYPE)
8727 || !validate_arg (len, INTEGER_TYPE))
8733 if (TREE_CODE (arg2) != INTEGER_CST
8734 || !host_integerp (len, 1))
8737 p1 = c_getstr (arg1);
8738 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8744 if (target_char_cast (arg2, &c))
8747 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8750 return build_int_cst (TREE_TYPE (arg1), 0);
8752 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8754 return fold_convert_loc (loc, type, tem);
8760 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8761 Return NULL_TREE if no simplification can be made. */
8764 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8766 const char *p1, *p2;
8768 if (!validate_arg (arg1, POINTER_TYPE)
8769 || !validate_arg (arg2, POINTER_TYPE)
8770 || !validate_arg (len, INTEGER_TYPE))
8773 /* If the LEN parameter is zero, return zero. */
8774 if (integer_zerop (len))
8775 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8778 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8779 if (operand_equal_p (arg1, arg2, 0))
8780 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8782 p1 = c_getstr (arg1);
8783 p2 = c_getstr (arg2);
8785 /* If all arguments are constant, and the value of len is not greater
8786 than the lengths of arg1 and arg2, evaluate at compile-time. */
8787 if (host_integerp (len, 1) && p1 && p2
8788 && compare_tree_int (len, strlen (p1) + 1) <= 0
8789 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8791 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8794 return integer_one_node;
8796 return integer_minus_one_node;
8798 return integer_zero_node;
8801 /* If len parameter is one, return an expression corresponding to
8802 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8803 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8805 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8806 tree cst_uchar_ptr_node
8807 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8810 = fold_convert_loc (loc, integer_type_node,
8811 build1 (INDIRECT_REF, cst_uchar_node,
8812 fold_convert_loc (loc,
8816 = fold_convert_loc (loc, integer_type_node,
8817 build1 (INDIRECT_REF, cst_uchar_node,
8818 fold_convert_loc (loc,
8821 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8827 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8828 Return NULL_TREE if no simplification can be made. */
8831 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8833 const char *p1, *p2;
8835 if (!validate_arg (arg1, POINTER_TYPE)
8836 || !validate_arg (arg2, POINTER_TYPE))
8839 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8840 if (operand_equal_p (arg1, arg2, 0))
8841 return integer_zero_node;
8843 p1 = c_getstr (arg1);
8844 p2 = c_getstr (arg2);
8848 const int i = strcmp (p1, p2);
8850 return integer_minus_one_node;
8852 return integer_one_node;
8854 return integer_zero_node;
8857 /* If the second arg is "", return *(const unsigned char*)arg1. */
8858 if (p2 && *p2 == '\0')
8860 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8861 tree cst_uchar_ptr_node
8862 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8864 return fold_convert_loc (loc, integer_type_node,
8865 build1 (INDIRECT_REF, cst_uchar_node,
8866 fold_convert_loc (loc,
8871 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8872 if (p1 && *p1 == '\0')
8874 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8875 tree cst_uchar_ptr_node
8876 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8879 = fold_convert_loc (loc, integer_type_node,
8880 build1 (INDIRECT_REF, cst_uchar_node,
8881 fold_convert_loc (loc,
8884 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8890 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8891 Return NULL_TREE if no simplification can be made. */
8894 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8896 const char *p1, *p2;
8898 if (!validate_arg (arg1, POINTER_TYPE)
8899 || !validate_arg (arg2, POINTER_TYPE)
8900 || !validate_arg (len, INTEGER_TYPE))
8903 /* If the LEN parameter is zero, return zero. */
8904 if (integer_zerop (len))
8905 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8908 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8909 if (operand_equal_p (arg1, arg2, 0))
8910 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8912 p1 = c_getstr (arg1);
8913 p2 = c_getstr (arg2);
8915 if (host_integerp (len, 1) && p1 && p2)
8917 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8919 return integer_one_node;
8921 return integer_minus_one_node;
8923 return integer_zero_node;
8926 /* If the second arg is "", and the length is greater than zero,
8927 return *(const unsigned char*)arg1. */
8928 if (p2 && *p2 == '\0'
8929 && TREE_CODE (len) == INTEGER_CST
8930 && tree_int_cst_sgn (len) == 1)
8932 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8933 tree cst_uchar_ptr_node
8934 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8936 return fold_convert_loc (loc, integer_type_node,
8937 build1 (INDIRECT_REF, cst_uchar_node,
8938 fold_convert_loc (loc,
8943 /* If the first arg is "", and the length is greater than zero,
8944 return -*(const unsigned char*)arg2. */
8945 if (p1 && *p1 == '\0'
8946 && TREE_CODE (len) == INTEGER_CST
8947 && tree_int_cst_sgn (len) == 1)
8949 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8950 tree cst_uchar_ptr_node
8951 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8953 tree temp = fold_convert_loc (loc, integer_type_node,
8954 build1 (INDIRECT_REF, cst_uchar_node,
8955 fold_convert_loc (loc,
8958 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8961 /* If len parameter is one, return an expression corresponding to
8962 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8963 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8965 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8966 tree cst_uchar_ptr_node
8967 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8969 tree ind1 = fold_convert_loc (loc, integer_type_node,
8970 build1 (INDIRECT_REF, cst_uchar_node,
8971 fold_convert_loc (loc,
8974 tree ind2 = fold_convert_loc (loc, integer_type_node,
8975 build1 (INDIRECT_REF, cst_uchar_node,
8976 fold_convert_loc (loc,
8979 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8985 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8986 ARG. Return NULL_TREE if no simplification can be made. */
8989 fold_builtin_signbit (location_t loc, tree arg, tree type)
8993 if (!validate_arg (arg, REAL_TYPE))
8996 /* If ARG is a compile-time constant, determine the result. */
8997 if (TREE_CODE (arg) == REAL_CST
8998 && !TREE_OVERFLOW (arg))
9002 c = TREE_REAL_CST (arg);
9003 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9004 return fold_convert_loc (loc, type, temp);
9007 /* If ARG is non-negative, the result is always zero. */
9008 if (tree_expr_nonnegative_p (arg))
9009 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9011 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9012 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9013 return fold_build2_loc (loc, LT_EXPR, type, arg,
9014 build_real (TREE_TYPE (arg), dconst0));
9019 /* Fold function call to builtin copysign, copysignf or copysignl with
9020 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9024 fold_builtin_copysign (location_t loc, tree fndecl,
9025 tree arg1, tree arg2, tree type)
9029 if (!validate_arg (arg1, REAL_TYPE)
9030 || !validate_arg (arg2, REAL_TYPE))
9033 /* copysign(X,X) is X. */
9034 if (operand_equal_p (arg1, arg2, 0))
9035 return fold_convert_loc (loc, type, arg1);
9037 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9038 if (TREE_CODE (arg1) == REAL_CST
9039 && TREE_CODE (arg2) == REAL_CST
9040 && !TREE_OVERFLOW (arg1)
9041 && !TREE_OVERFLOW (arg2))
9043 REAL_VALUE_TYPE c1, c2;
9045 c1 = TREE_REAL_CST (arg1);
9046 c2 = TREE_REAL_CST (arg2);
9047 /* c1.sign := c2.sign. */
9048 real_copysign (&c1, &c2);
9049 return build_real (type, c1);
9052 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9053 Remember to evaluate Y for side-effects. */
9054 if (tree_expr_nonnegative_p (arg2))
9055 return omit_one_operand_loc (loc, type,
9056 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9059 /* Strip sign changing operations for the first argument. */
9060 tem = fold_strip_sign_ops (arg1);
9062 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9067 /* Fold a call to builtin isascii with argument ARG. */
9070 fold_builtin_isascii (location_t loc, tree arg)
9072 if (!validate_arg (arg, INTEGER_TYPE))
9076 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9077 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9078 build_int_cst (NULL_TREE,
9079 ~ (unsigned HOST_WIDE_INT) 0x7f));
9080 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9081 arg, integer_zero_node);
9085 /* Fold a call to builtin toascii with argument ARG. */
9088 fold_builtin_toascii (location_t loc, tree arg)
9090 if (!validate_arg (arg, INTEGER_TYPE))
9093 /* Transform toascii(c) -> (c & 0x7f). */
9094 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9095 build_int_cst (NULL_TREE, 0x7f));
9098 /* Fold a call to builtin isdigit with argument ARG. */
9101 fold_builtin_isdigit (location_t loc, tree arg)
9103 if (!validate_arg (arg, INTEGER_TYPE))
9107 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9108 /* According to the C standard, isdigit is unaffected by locale.
9109 However, it definitely is affected by the target character set. */
9110 unsigned HOST_WIDE_INT target_digit0
9111 = lang_hooks.to_target_charset ('0');
9113 if (target_digit0 == 0)
9116 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9117 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9118 build_int_cst (unsigned_type_node, target_digit0));
9119 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9120 build_int_cst (unsigned_type_node, 9));
9124 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9127 fold_builtin_fabs (location_t loc, tree arg, tree type)
9129 if (!validate_arg (arg, REAL_TYPE))
9132 arg = fold_convert_loc (loc, type, arg);
9133 if (TREE_CODE (arg) == REAL_CST)
9134 return fold_abs_const (arg, type);
9135 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9138 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9141 fold_builtin_abs (location_t loc, tree arg, tree type)
9143 if (!validate_arg (arg, INTEGER_TYPE))
9146 arg = fold_convert_loc (loc, type, arg);
9147 if (TREE_CODE (arg) == INTEGER_CST)
9148 return fold_abs_const (arg, type);
9149 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9152 /* Fold a call to builtin fmin or fmax. */
9155 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9156 tree type, bool max)
9158 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9160 /* Calculate the result when the argument is a constant. */
9161 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9166 /* If either argument is NaN, return the other one. Avoid the
9167 transformation if we get (and honor) a signalling NaN. Using
9168 omit_one_operand() ensures we create a non-lvalue. */
9169 if (TREE_CODE (arg0) == REAL_CST
9170 && real_isnan (&TREE_REAL_CST (arg0))
9171 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9172 || ! TREE_REAL_CST (arg0).signalling))
9173 return omit_one_operand_loc (loc, type, arg1, arg0);
9174 if (TREE_CODE (arg1) == REAL_CST
9175 && real_isnan (&TREE_REAL_CST (arg1))
9176 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9177 || ! TREE_REAL_CST (arg1).signalling))
9178 return omit_one_operand_loc (loc, type, arg0, arg1);
9180 /* Transform fmin/fmax(x,x) -> x. */
9181 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9182 return omit_one_operand_loc (loc, type, arg0, arg1);
9184 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9185 functions to return the numeric arg if the other one is NaN.
9186 These tree codes don't honor that, so only transform if
9187 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9188 handled, so we don't have to worry about it either. */
9189 if (flag_finite_math_only)
9190 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9191 fold_convert_loc (loc, type, arg0),
9192 fold_convert_loc (loc, type, arg1));
9197 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9200 fold_builtin_carg (location_t loc, tree arg, tree type)
9202 if (validate_arg (arg, COMPLEX_TYPE)
9203 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9205 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9209 tree new_arg = builtin_save_expr (arg);
9210 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9211 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9212 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9219 /* Fold a call to builtin logb/ilogb. */
9222 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9224 if (! validate_arg (arg, REAL_TYPE))
9229 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9231 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9237 /* If arg is Inf or NaN and we're logb, return it. */
9238 if (TREE_CODE (rettype) == REAL_TYPE)
9239 return fold_convert_loc (loc, rettype, arg);
9240 /* Fall through... */
9242 /* Zero may set errno and/or raise an exception for logb, also
9243 for ilogb we don't know FP_ILOGB0. */
9246 /* For normal numbers, proceed iff radix == 2. In GCC,
9247 normalized significands are in the range [0.5, 1.0). We
9248 want the exponent as if they were [1.0, 2.0) so get the
9249 exponent and subtract 1. */
9250 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9251 return fold_convert_loc (loc, rettype,
9252 build_int_cst (NULL_TREE,
9253 REAL_EXP (value)-1));
9261 /* Fold a call to builtin significand, if radix == 2. */
9264 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9266 if (! validate_arg (arg, REAL_TYPE))
9271 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9273 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9280 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9281 return fold_convert_loc (loc, rettype, arg);
9283 /* For normal numbers, proceed iff radix == 2. */
9284 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9286 REAL_VALUE_TYPE result = *value;
9287 /* In GCC, normalized significands are in the range [0.5,
9288 1.0). We want them to be [1.0, 2.0) so set the
9290 SET_REAL_EXP (&result, 1);
9291 return build_real (rettype, result);
9300 /* Fold a call to builtin frexp, we can assume the base is 2. */
9303 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9305 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9310 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9313 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9315 /* Proceed if a valid pointer type was passed in. */
9316 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9318 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9324 /* For +-0, return (*exp = 0, +-0). */
9325 exp = integer_zero_node;
9330 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9331 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9334 /* Since the frexp function always expects base 2, and in
9335 GCC normalized significands are already in the range
9336 [0.5, 1.0), we have exactly what frexp wants. */
9337 REAL_VALUE_TYPE frac_rvt = *value;
9338 SET_REAL_EXP (&frac_rvt, 0);
9339 frac = build_real (rettype, frac_rvt);
9340 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9347 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9348 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9349 TREE_SIDE_EFFECTS (arg1) = 1;
9350 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9356 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9357 then we can assume the base is two. If it's false, then we have to
9358 check the mode of the TYPE parameter in certain cases. */
9361 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9362 tree type, bool ldexp)
9364 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9369 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9370 if (real_zerop (arg0) || integer_zerop (arg1)
9371 || (TREE_CODE (arg0) == REAL_CST
9372 && !real_isfinite (&TREE_REAL_CST (arg0))))
9373 return omit_one_operand_loc (loc, type, arg0, arg1);
9375 /* If both arguments are constant, then try to evaluate it. */
9376 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9377 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9378 && host_integerp (arg1, 0))
9380 /* Bound the maximum adjustment to twice the range of the
9381 mode's valid exponents. Use abs to ensure the range is
9382 positive as a sanity check. */
9383 const long max_exp_adj = 2 *
9384 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9385 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9387 /* Get the user-requested adjustment. */
9388 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9390 /* The requested adjustment must be inside this range. This
9391 is a preliminary cap to avoid things like overflow, we
9392 may still fail to compute the result for other reasons. */
9393 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9395 REAL_VALUE_TYPE initial_result;
9397 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9399 /* Ensure we didn't overflow. */
9400 if (! real_isinf (&initial_result))
9402 const REAL_VALUE_TYPE trunc_result
9403 = real_value_truncate (TYPE_MODE (type), initial_result);
9405 /* Only proceed if the target mode can hold the
9407 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9408 return build_real (type, trunc_result);
9417 /* Fold a call to builtin modf. */
9420 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9422 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9427 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9430 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9432 /* Proceed if a valid pointer type was passed in. */
9433 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9435 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9436 REAL_VALUE_TYPE trunc, frac;
9442 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9443 trunc = frac = *value;
9446 /* For +-Inf, return (*arg1 = arg0, +-0). */
9448 frac.sign = value->sign;
9452 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9453 real_trunc (&trunc, VOIDmode, value);
9454 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9455 /* If the original number was negative and already
9456 integral, then the fractional part is -0.0. */
9457 if (value->sign && frac.cl == rvc_zero)
9458 frac.sign = value->sign;
9462 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9463 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9464 build_real (rettype, trunc));
9465 TREE_SIDE_EFFECTS (arg1) = 1;
9466 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9467 build_real (rettype, frac));
9473 /* Given a location LOC, an interclass builtin function decl FNDECL
9474 and its single argument ARG, return an folded expression computing
9475 the same, or NULL_TREE if we either couldn't or didn't want to fold
9476 (the latter happen if there's an RTL instruction available). */
9479 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9481 enum machine_mode mode;
9483 if (!validate_arg (arg, REAL_TYPE))
9486 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9489 mode = TYPE_MODE (TREE_TYPE (arg));
9491 /* If there is no optab, try generic code. */
9492 switch (DECL_FUNCTION_CODE (fndecl))
9496 CASE_FLT_FN (BUILT_IN_ISINF):
9498 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9499 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9500 tree const type = TREE_TYPE (arg);
9504 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9505 real_from_string (&r, buf);
9506 result = build_call_expr (isgr_fn, 2,
9507 fold_build1_loc (loc, ABS_EXPR, type, arg),
9508 build_real (type, r));
9511 CASE_FLT_FN (BUILT_IN_FINITE):
9512 case BUILT_IN_ISFINITE:
9514 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9515 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9516 tree const type = TREE_TYPE (arg);
9520 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9521 real_from_string (&r, buf);
9522 result = build_call_expr (isle_fn, 2,
9523 fold_build1_loc (loc, ABS_EXPR, type, arg),
9524 build_real (type, r));
9525 /*result = fold_build2_loc (loc, UNGT_EXPR,
9526 TREE_TYPE (TREE_TYPE (fndecl)),
9527 fold_build1_loc (loc, ABS_EXPR, type, arg),
9528 build_real (type, r));
9529 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9530 TREE_TYPE (TREE_TYPE (fndecl)),
9534 case BUILT_IN_ISNORMAL:
9536 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9537 islessequal(fabs(x),DBL_MAX). */
9538 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9539 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9540 tree const type = TREE_TYPE (arg);
9541 REAL_VALUE_TYPE rmax, rmin;
9544 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9545 real_from_string (&rmax, buf);
9546 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9547 real_from_string (&rmin, buf);
9548 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9549 result = build_call_expr (isle_fn, 2, arg,
9550 build_real (type, rmax));
9551 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9552 build_call_expr (isge_fn, 2, arg,
9553 build_real (type, rmin)));
9563 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9564 ARG is the argument for the call. */
9567 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9569 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9572 if (!validate_arg (arg, REAL_TYPE))
9575 switch (builtin_index)
9577 case BUILT_IN_ISINF:
9578 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9579 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9581 if (TREE_CODE (arg) == REAL_CST)
9583 r = TREE_REAL_CST (arg);
9584 if (real_isinf (&r))
9585 return real_compare (GT_EXPR, &r, &dconst0)
9586 ? integer_one_node : integer_minus_one_node;
9588 return integer_zero_node;
9593 case BUILT_IN_ISINF_SIGN:
9595 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9596 /* In a boolean context, GCC will fold the inner COND_EXPR to
9597 1. So e.g. "if (isinf_sign(x))" would be folded to just
9598 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9599 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9600 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9601 tree tmp = NULL_TREE;
9603 arg = builtin_save_expr (arg);
9605 if (signbit_fn && isinf_fn)
9607 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9608 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9610 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9611 signbit_call, integer_zero_node);
9612 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9613 isinf_call, integer_zero_node);
9615 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9616 integer_minus_one_node, integer_one_node);
9617 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9625 case BUILT_IN_ISFINITE:
9626 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9627 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9628 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9630 if (TREE_CODE (arg) == REAL_CST)
9632 r = TREE_REAL_CST (arg);
9633 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9638 case BUILT_IN_ISNAN:
9639 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9640 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9642 if (TREE_CODE (arg) == REAL_CST)
9644 r = TREE_REAL_CST (arg);
9645 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9648 arg = builtin_save_expr (arg);
9649 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9656 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9657 This builtin will generate code to return the appropriate floating
9658 point classification depending on the value of the floating point
9659 number passed in. The possible return values must be supplied as
9660 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9661 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9662 one floating point argument which is "type generic". */
9665 fold_builtin_fpclassify (location_t loc, tree exp)
9667 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9668 arg, type, res, tmp;
9669 enum machine_mode mode;
9673 /* Verify the required arguments in the original call. */
9674 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9675 INTEGER_TYPE, INTEGER_TYPE,
9676 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9679 fp_nan = CALL_EXPR_ARG (exp, 0);
9680 fp_infinite = CALL_EXPR_ARG (exp, 1);
9681 fp_normal = CALL_EXPR_ARG (exp, 2);
9682 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9683 fp_zero = CALL_EXPR_ARG (exp, 4);
9684 arg = CALL_EXPR_ARG (exp, 5);
9685 type = TREE_TYPE (arg);
9686 mode = TYPE_MODE (type);
9687 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9691 (fabs(x) == Inf ? FP_INFINITE :
9692 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9693 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9695 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9696 build_real (type, dconst0));
9697 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9698 tmp, fp_zero, fp_subnormal);
9700 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9701 real_from_string (&r, buf);
9702 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9703 arg, build_real (type, r));
9704 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9706 if (HONOR_INFINITIES (mode))
9709 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9710 build_real (type, r));
9711 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9715 if (HONOR_NANS (mode))
9717 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9718 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9724 /* Fold a call to an unordered comparison function such as
9725 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9726 being called and ARG0 and ARG1 are the arguments for the call.
9727 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9728 the opposite of the desired result. UNORDERED_CODE is used
9729 for modes that can hold NaNs and ORDERED_CODE is used for
9733 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9734 enum tree_code unordered_code,
9735 enum tree_code ordered_code)
9737 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9738 enum tree_code code;
9740 enum tree_code code0, code1;
9741 tree cmp_type = NULL_TREE;
9743 type0 = TREE_TYPE (arg0);
9744 type1 = TREE_TYPE (arg1);
9746 code0 = TREE_CODE (type0);
9747 code1 = TREE_CODE (type1);
9749 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9750 /* Choose the wider of two real types. */
9751 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9753 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9755 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9758 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9759 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9761 if (unordered_code == UNORDERED_EXPR)
9763 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9764 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9765 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9768 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9770 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9771 fold_build2_loc (loc, code, type, arg0, arg1));
9774 /* Fold a call to built-in function FNDECL with 0 arguments.
9775 IGNORE is true if the result of the function call is ignored. This
9776 function returns NULL_TREE if no simplification was possible. */
9779 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9781 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9782 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9785 CASE_FLT_FN (BUILT_IN_INF):
9786 case BUILT_IN_INFD32:
9787 case BUILT_IN_INFD64:
9788 case BUILT_IN_INFD128:
9789 return fold_builtin_inf (loc, type, true);
9791 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9792 return fold_builtin_inf (loc, type, false);
9794 case BUILT_IN_CLASSIFY_TYPE:
9795 return fold_builtin_classify_type (NULL_TREE);
9803 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9804 IGNORE is true if the result of the function call is ignored. This
9805 function returns NULL_TREE if no simplification was possible. */
9808 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9810 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9811 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9814 case BUILT_IN_CONSTANT_P:
9816 tree val = fold_builtin_constant_p (arg0);
9818 /* Gimplification will pull the CALL_EXPR for the builtin out of
9819 an if condition. When not optimizing, we'll not CSE it back.
9820 To avoid link error types of regressions, return false now. */
9821 if (!val && !optimize)
9822 val = integer_zero_node;
9827 case BUILT_IN_CLASSIFY_TYPE:
9828 return fold_builtin_classify_type (arg0);
9830 case BUILT_IN_STRLEN:
9831 return fold_builtin_strlen (loc, type, arg0);
9833 CASE_FLT_FN (BUILT_IN_FABS):
9834 return fold_builtin_fabs (loc, arg0, type);
9838 case BUILT_IN_LLABS:
9839 case BUILT_IN_IMAXABS:
9840 return fold_builtin_abs (loc, arg0, type);
9842 CASE_FLT_FN (BUILT_IN_CONJ):
9843 if (validate_arg (arg0, COMPLEX_TYPE)
9844 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9845 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9848 CASE_FLT_FN (BUILT_IN_CREAL):
9849 if (validate_arg (arg0, COMPLEX_TYPE)
9850 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9851 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9854 CASE_FLT_FN (BUILT_IN_CIMAG):
9855 if (validate_arg (arg0, COMPLEX_TYPE)
9856 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9857 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9860 CASE_FLT_FN (BUILT_IN_CCOS):
9861 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9863 CASE_FLT_FN (BUILT_IN_CCOSH):
9864 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9866 CASE_FLT_FN (BUILT_IN_CPROJ):
9867 return fold_builtin_cproj(loc, arg0, type);
9869 CASE_FLT_FN (BUILT_IN_CSIN):
9870 if (validate_arg (arg0, COMPLEX_TYPE)
9871 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9872 return do_mpc_arg1 (arg0, type, mpc_sin);
9875 CASE_FLT_FN (BUILT_IN_CSINH):
9876 if (validate_arg (arg0, COMPLEX_TYPE)
9877 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9878 return do_mpc_arg1 (arg0, type, mpc_sinh);
9881 CASE_FLT_FN (BUILT_IN_CTAN):
9882 if (validate_arg (arg0, COMPLEX_TYPE)
9883 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9884 return do_mpc_arg1 (arg0, type, mpc_tan);
9887 CASE_FLT_FN (BUILT_IN_CTANH):
9888 if (validate_arg (arg0, COMPLEX_TYPE)
9889 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9890 return do_mpc_arg1 (arg0, type, mpc_tanh);
9893 CASE_FLT_FN (BUILT_IN_CLOG):
9894 if (validate_arg (arg0, COMPLEX_TYPE)
9895 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9896 return do_mpc_arg1 (arg0, type, mpc_log);
9899 CASE_FLT_FN (BUILT_IN_CSQRT):
9900 if (validate_arg (arg0, COMPLEX_TYPE)
9901 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9902 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9905 CASE_FLT_FN (BUILT_IN_CASIN):
9906 if (validate_arg (arg0, COMPLEX_TYPE)
9907 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9908 return do_mpc_arg1 (arg0, type, mpc_asin);
9911 CASE_FLT_FN (BUILT_IN_CACOS):
9912 if (validate_arg (arg0, COMPLEX_TYPE)
9913 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9914 return do_mpc_arg1 (arg0, type, mpc_acos);
9917 CASE_FLT_FN (BUILT_IN_CATAN):
9918 if (validate_arg (arg0, COMPLEX_TYPE)
9919 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9920 return do_mpc_arg1 (arg0, type, mpc_atan);
9923 CASE_FLT_FN (BUILT_IN_CASINH):
9924 if (validate_arg (arg0, COMPLEX_TYPE)
9925 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9926 return do_mpc_arg1 (arg0, type, mpc_asinh);
9929 CASE_FLT_FN (BUILT_IN_CACOSH):
9930 if (validate_arg (arg0, COMPLEX_TYPE)
9931 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9932 return do_mpc_arg1 (arg0, type, mpc_acosh);
9935 CASE_FLT_FN (BUILT_IN_CATANH):
9936 if (validate_arg (arg0, COMPLEX_TYPE)
9937 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9938 return do_mpc_arg1 (arg0, type, mpc_atanh);
9941 CASE_FLT_FN (BUILT_IN_CABS):
9942 return fold_builtin_cabs (loc, arg0, type, fndecl);
9944 CASE_FLT_FN (BUILT_IN_CARG):
9945 return fold_builtin_carg (loc, arg0, type);
9947 CASE_FLT_FN (BUILT_IN_SQRT):
9948 return fold_builtin_sqrt (loc, arg0, type);
9950 CASE_FLT_FN (BUILT_IN_CBRT):
9951 return fold_builtin_cbrt (loc, arg0, type);
9953 CASE_FLT_FN (BUILT_IN_ASIN):
9954 if (validate_arg (arg0, REAL_TYPE))
9955 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9956 &dconstm1, &dconst1, true);
9959 CASE_FLT_FN (BUILT_IN_ACOS):
9960 if (validate_arg (arg0, REAL_TYPE))
9961 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9962 &dconstm1, &dconst1, true);
9965 CASE_FLT_FN (BUILT_IN_ATAN):
9966 if (validate_arg (arg0, REAL_TYPE))
9967 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9970 CASE_FLT_FN (BUILT_IN_ASINH):
9971 if (validate_arg (arg0, REAL_TYPE))
9972 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9975 CASE_FLT_FN (BUILT_IN_ACOSH):
9976 if (validate_arg (arg0, REAL_TYPE))
9977 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9978 &dconst1, NULL, true);
9981 CASE_FLT_FN (BUILT_IN_ATANH):
9982 if (validate_arg (arg0, REAL_TYPE))
9983 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9984 &dconstm1, &dconst1, false);
9987 CASE_FLT_FN (BUILT_IN_SIN):
9988 if (validate_arg (arg0, REAL_TYPE))
9989 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9992 CASE_FLT_FN (BUILT_IN_COS):
9993 return fold_builtin_cos (loc, arg0, type, fndecl);
9995 CASE_FLT_FN (BUILT_IN_TAN):
9996 return fold_builtin_tan (arg0, type);
9998 CASE_FLT_FN (BUILT_IN_CEXP):
9999 return fold_builtin_cexp (loc, arg0, type);
10001 CASE_FLT_FN (BUILT_IN_CEXPI):
10002 if (validate_arg (arg0, REAL_TYPE))
10003 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10006 CASE_FLT_FN (BUILT_IN_SINH):
10007 if (validate_arg (arg0, REAL_TYPE))
10008 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10011 CASE_FLT_FN (BUILT_IN_COSH):
10012 return fold_builtin_cosh (loc, arg0, type, fndecl);
10014 CASE_FLT_FN (BUILT_IN_TANH):
10015 if (validate_arg (arg0, REAL_TYPE))
10016 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10019 CASE_FLT_FN (BUILT_IN_ERF):
10020 if (validate_arg (arg0, REAL_TYPE))
10021 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10024 CASE_FLT_FN (BUILT_IN_ERFC):
10025 if (validate_arg (arg0, REAL_TYPE))
10026 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10029 CASE_FLT_FN (BUILT_IN_TGAMMA):
10030 if (validate_arg (arg0, REAL_TYPE))
10031 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10034 CASE_FLT_FN (BUILT_IN_EXP):
10035 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10037 CASE_FLT_FN (BUILT_IN_EXP2):
10038 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10040 CASE_FLT_FN (BUILT_IN_EXP10):
10041 CASE_FLT_FN (BUILT_IN_POW10):
10042 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10044 CASE_FLT_FN (BUILT_IN_EXPM1):
10045 if (validate_arg (arg0, REAL_TYPE))
10046 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10049 CASE_FLT_FN (BUILT_IN_LOG):
10050 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10052 CASE_FLT_FN (BUILT_IN_LOG2):
10053 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10055 CASE_FLT_FN (BUILT_IN_LOG10):
10056 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10058 CASE_FLT_FN (BUILT_IN_LOG1P):
10059 if (validate_arg (arg0, REAL_TYPE))
10060 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10061 &dconstm1, NULL, false);
10064 CASE_FLT_FN (BUILT_IN_J0):
10065 if (validate_arg (arg0, REAL_TYPE))
10066 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10070 CASE_FLT_FN (BUILT_IN_J1):
10071 if (validate_arg (arg0, REAL_TYPE))
10072 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10076 CASE_FLT_FN (BUILT_IN_Y0):
10077 if (validate_arg (arg0, REAL_TYPE))
10078 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10079 &dconst0, NULL, false);
10082 CASE_FLT_FN (BUILT_IN_Y1):
10083 if (validate_arg (arg0, REAL_TYPE))
10084 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10085 &dconst0, NULL, false);
10088 CASE_FLT_FN (BUILT_IN_NAN):
10089 case BUILT_IN_NAND32:
10090 case BUILT_IN_NAND64:
10091 case BUILT_IN_NAND128:
10092 return fold_builtin_nan (arg0, type, true);
10094 CASE_FLT_FN (BUILT_IN_NANS):
10095 return fold_builtin_nan (arg0, type, false);
10097 CASE_FLT_FN (BUILT_IN_FLOOR):
10098 return fold_builtin_floor (loc, fndecl, arg0);
10100 CASE_FLT_FN (BUILT_IN_CEIL):
10101 return fold_builtin_ceil (loc, fndecl, arg0);
10103 CASE_FLT_FN (BUILT_IN_TRUNC):
10104 return fold_builtin_trunc (loc, fndecl, arg0);
10106 CASE_FLT_FN (BUILT_IN_ROUND):
10107 return fold_builtin_round (loc, fndecl, arg0);
10109 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10110 CASE_FLT_FN (BUILT_IN_RINT):
10111 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10113 CASE_FLT_FN (BUILT_IN_LCEIL):
10114 CASE_FLT_FN (BUILT_IN_LLCEIL):
10115 CASE_FLT_FN (BUILT_IN_LFLOOR):
10116 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10117 CASE_FLT_FN (BUILT_IN_LROUND):
10118 CASE_FLT_FN (BUILT_IN_LLROUND):
10119 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10121 CASE_FLT_FN (BUILT_IN_LRINT):
10122 CASE_FLT_FN (BUILT_IN_LLRINT):
10123 return fold_fixed_mathfn (loc, fndecl, arg0);
10125 case BUILT_IN_BSWAP32:
10126 case BUILT_IN_BSWAP64:
10127 return fold_builtin_bswap (fndecl, arg0);
10129 CASE_INT_FN (BUILT_IN_FFS):
10130 CASE_INT_FN (BUILT_IN_CLZ):
10131 CASE_INT_FN (BUILT_IN_CTZ):
10132 CASE_INT_FN (BUILT_IN_POPCOUNT):
10133 CASE_INT_FN (BUILT_IN_PARITY):
10134 return fold_builtin_bitop (fndecl, arg0);
10136 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10137 return fold_builtin_signbit (loc, arg0, type);
10139 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10140 return fold_builtin_significand (loc, arg0, type);
10142 CASE_FLT_FN (BUILT_IN_ILOGB):
10143 CASE_FLT_FN (BUILT_IN_LOGB):
10144 return fold_builtin_logb (loc, arg0, type);
10146 case BUILT_IN_ISASCII:
10147 return fold_builtin_isascii (loc, arg0);
10149 case BUILT_IN_TOASCII:
10150 return fold_builtin_toascii (loc, arg0);
10152 case BUILT_IN_ISDIGIT:
10153 return fold_builtin_isdigit (loc, arg0);
10155 CASE_FLT_FN (BUILT_IN_FINITE):
10156 case BUILT_IN_FINITED32:
10157 case BUILT_IN_FINITED64:
10158 case BUILT_IN_FINITED128:
10159 case BUILT_IN_ISFINITE:
10161 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10164 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10167 CASE_FLT_FN (BUILT_IN_ISINF):
10168 case BUILT_IN_ISINFD32:
10169 case BUILT_IN_ISINFD64:
10170 case BUILT_IN_ISINFD128:
10172 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10175 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10178 case BUILT_IN_ISNORMAL:
10179 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10181 case BUILT_IN_ISINF_SIGN:
10182 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10184 CASE_FLT_FN (BUILT_IN_ISNAN):
10185 case BUILT_IN_ISNAND32:
10186 case BUILT_IN_ISNAND64:
10187 case BUILT_IN_ISNAND128:
10188 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10190 case BUILT_IN_PRINTF:
10191 case BUILT_IN_PRINTF_UNLOCKED:
10192 case BUILT_IN_VPRINTF:
10193 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10195 case BUILT_IN_FREE:
10196 if (integer_zerop (arg0))
10197 return build_empty_stmt (loc);
10208 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10209 IGNORE is true if the result of the function call is ignored. This
10210 function returns NULL_TREE if no simplification was possible. */
10213 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10215 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10216 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10220 CASE_FLT_FN (BUILT_IN_JN):
10221 if (validate_arg (arg0, INTEGER_TYPE)
10222 && validate_arg (arg1, REAL_TYPE))
10223 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10226 CASE_FLT_FN (BUILT_IN_YN):
10227 if (validate_arg (arg0, INTEGER_TYPE)
10228 && validate_arg (arg1, REAL_TYPE))
10229 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10233 CASE_FLT_FN (BUILT_IN_DREM):
10234 CASE_FLT_FN (BUILT_IN_REMAINDER):
10235 if (validate_arg (arg0, REAL_TYPE)
10236 && validate_arg(arg1, REAL_TYPE))
10237 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10240 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10241 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10242 if (validate_arg (arg0, REAL_TYPE)
10243 && validate_arg(arg1, POINTER_TYPE))
10244 return do_mpfr_lgamma_r (arg0, arg1, type);
10247 CASE_FLT_FN (BUILT_IN_ATAN2):
10248 if (validate_arg (arg0, REAL_TYPE)
10249 && validate_arg(arg1, REAL_TYPE))
10250 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10253 CASE_FLT_FN (BUILT_IN_FDIM):
10254 if (validate_arg (arg0, REAL_TYPE)
10255 && validate_arg(arg1, REAL_TYPE))
10256 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10259 CASE_FLT_FN (BUILT_IN_HYPOT):
10260 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10262 CASE_FLT_FN (BUILT_IN_CPOW):
10263 if (validate_arg (arg0, COMPLEX_TYPE)
10264 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10265 && validate_arg (arg1, COMPLEX_TYPE)
10266 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10267 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10270 CASE_FLT_FN (BUILT_IN_LDEXP):
10271 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10272 CASE_FLT_FN (BUILT_IN_SCALBN):
10273 CASE_FLT_FN (BUILT_IN_SCALBLN):
10274 return fold_builtin_load_exponent (loc, arg0, arg1,
10275 type, /*ldexp=*/false);
10277 CASE_FLT_FN (BUILT_IN_FREXP):
10278 return fold_builtin_frexp (loc, arg0, arg1, type);
10280 CASE_FLT_FN (BUILT_IN_MODF):
10281 return fold_builtin_modf (loc, arg0, arg1, type);
10283 case BUILT_IN_BZERO:
10284 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10286 case BUILT_IN_FPUTS:
10287 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10289 case BUILT_IN_FPUTS_UNLOCKED:
10290 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10292 case BUILT_IN_STRSTR:
10293 return fold_builtin_strstr (loc, arg0, arg1, type);
10295 case BUILT_IN_STRCAT:
10296 return fold_builtin_strcat (loc, arg0, arg1);
10298 case BUILT_IN_STRSPN:
10299 return fold_builtin_strspn (loc, arg0, arg1);
10301 case BUILT_IN_STRCSPN:
10302 return fold_builtin_strcspn (loc, arg0, arg1);
10304 case BUILT_IN_STRCHR:
10305 case BUILT_IN_INDEX:
10306 return fold_builtin_strchr (loc, arg0, arg1, type);
10308 case BUILT_IN_STRRCHR:
10309 case BUILT_IN_RINDEX:
10310 return fold_builtin_strrchr (loc, arg0, arg1, type);
10312 case BUILT_IN_STRCPY:
10313 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10315 case BUILT_IN_STPCPY:
10318 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10322 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10325 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10328 case BUILT_IN_STRCMP:
10329 return fold_builtin_strcmp (loc, arg0, arg1);
10331 case BUILT_IN_STRPBRK:
10332 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10334 case BUILT_IN_EXPECT:
10335 return fold_builtin_expect (loc, arg0, arg1);
10337 CASE_FLT_FN (BUILT_IN_POW):
10338 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10340 CASE_FLT_FN (BUILT_IN_POWI):
10341 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10343 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10344 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10346 CASE_FLT_FN (BUILT_IN_FMIN):
10347 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10349 CASE_FLT_FN (BUILT_IN_FMAX):
10350 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10352 case BUILT_IN_ISGREATER:
10353 return fold_builtin_unordered_cmp (loc, fndecl,
10354 arg0, arg1, UNLE_EXPR, LE_EXPR);
10355 case BUILT_IN_ISGREATEREQUAL:
10356 return fold_builtin_unordered_cmp (loc, fndecl,
10357 arg0, arg1, UNLT_EXPR, LT_EXPR);
10358 case BUILT_IN_ISLESS:
10359 return fold_builtin_unordered_cmp (loc, fndecl,
10360 arg0, arg1, UNGE_EXPR, GE_EXPR);
10361 case BUILT_IN_ISLESSEQUAL:
10362 return fold_builtin_unordered_cmp (loc, fndecl,
10363 arg0, arg1, UNGT_EXPR, GT_EXPR);
10364 case BUILT_IN_ISLESSGREATER:
10365 return fold_builtin_unordered_cmp (loc, fndecl,
10366 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10367 case BUILT_IN_ISUNORDERED:
10368 return fold_builtin_unordered_cmp (loc, fndecl,
10369 arg0, arg1, UNORDERED_EXPR,
10372 /* We do the folding for va_start in the expander. */
10373 case BUILT_IN_VA_START:
10376 case BUILT_IN_SPRINTF:
10377 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10379 case BUILT_IN_OBJECT_SIZE:
10380 return fold_builtin_object_size (arg0, arg1);
10382 case BUILT_IN_PRINTF:
10383 case BUILT_IN_PRINTF_UNLOCKED:
10384 case BUILT_IN_VPRINTF:
10385 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10387 case BUILT_IN_PRINTF_CHK:
10388 case BUILT_IN_VPRINTF_CHK:
10389 if (!validate_arg (arg0, INTEGER_TYPE)
10390 || TREE_SIDE_EFFECTS (arg0))
10393 return fold_builtin_printf (loc, fndecl,
10394 arg1, NULL_TREE, ignore, fcode);
10397 case BUILT_IN_FPRINTF:
10398 case BUILT_IN_FPRINTF_UNLOCKED:
10399 case BUILT_IN_VFPRINTF:
10400 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10409 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10410 and ARG2. IGNORE is true if the result of the function call is ignored.
10411 This function returns NULL_TREE if no simplification was possible. */
10414 fold_builtin_3 (location_t loc, tree fndecl,
10415 tree arg0, tree arg1, tree arg2, bool ignore)
10417 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10418 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10422 CASE_FLT_FN (BUILT_IN_SINCOS):
10423 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10425 CASE_FLT_FN (BUILT_IN_FMA):
10426 if (validate_arg (arg0, REAL_TYPE)
10427 && validate_arg(arg1, REAL_TYPE)
10428 && validate_arg(arg2, REAL_TYPE))
10429 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10432 CASE_FLT_FN (BUILT_IN_REMQUO):
10433 if (validate_arg (arg0, REAL_TYPE)
10434 && validate_arg(arg1, REAL_TYPE)
10435 && validate_arg(arg2, POINTER_TYPE))
10436 return do_mpfr_remquo (arg0, arg1, arg2);
10439 case BUILT_IN_MEMSET:
10440 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10442 case BUILT_IN_BCOPY:
10443 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10444 void_type_node, true, /*endp=*/3);
10446 case BUILT_IN_MEMCPY:
10447 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10448 type, ignore, /*endp=*/0);
10450 case BUILT_IN_MEMPCPY:
10451 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10452 type, ignore, /*endp=*/1);
10454 case BUILT_IN_MEMMOVE:
10455 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10456 type, ignore, /*endp=*/3);
10458 case BUILT_IN_STRNCAT:
10459 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10461 case BUILT_IN_STRNCPY:
10462 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10464 case BUILT_IN_STRNCMP:
10465 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10467 case BUILT_IN_MEMCHR:
10468 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10470 case BUILT_IN_BCMP:
10471 case BUILT_IN_MEMCMP:
10472 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10474 case BUILT_IN_SPRINTF:
10475 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10477 case BUILT_IN_STRCPY_CHK:
10478 case BUILT_IN_STPCPY_CHK:
10479 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10482 case BUILT_IN_STRCAT_CHK:
10483 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10485 case BUILT_IN_PRINTF_CHK:
10486 case BUILT_IN_VPRINTF_CHK:
10487 if (!validate_arg (arg0, INTEGER_TYPE)
10488 || TREE_SIDE_EFFECTS (arg0))
10491 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10494 case BUILT_IN_FPRINTF:
10495 case BUILT_IN_FPRINTF_UNLOCKED:
10496 case BUILT_IN_VFPRINTF:
10497 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10500 case BUILT_IN_FPRINTF_CHK:
10501 case BUILT_IN_VFPRINTF_CHK:
10502 if (!validate_arg (arg1, INTEGER_TYPE)
10503 || TREE_SIDE_EFFECTS (arg1))
10506 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10515 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10516 ARG2, and ARG3. IGNORE is true if the result of the function call is
10517 ignored. This function returns NULL_TREE if no simplification was
10521 fold_builtin_4 (location_t loc, tree fndecl,
10522 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10524 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10528 case BUILT_IN_MEMCPY_CHK:
10529 case BUILT_IN_MEMPCPY_CHK:
10530 case BUILT_IN_MEMMOVE_CHK:
10531 case BUILT_IN_MEMSET_CHK:
10532 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10534 DECL_FUNCTION_CODE (fndecl));
10536 case BUILT_IN_STRNCPY_CHK:
10537 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10539 case BUILT_IN_STRNCAT_CHK:
10540 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10542 case BUILT_IN_FPRINTF_CHK:
10543 case BUILT_IN_VFPRINTF_CHK:
10544 if (!validate_arg (arg1, INTEGER_TYPE)
10545 || TREE_SIDE_EFFECTS (arg1))
10548 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10558 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10559 arguments, where NARGS <= 4. IGNORE is true if the result of the
10560 function call is ignored. This function returns NULL_TREE if no
10561 simplification was possible. Note that this only folds builtins with
10562 fixed argument patterns. Foldings that do varargs-to-varargs
10563 transformations, or that match calls with more than 4 arguments,
10564 need to be handled with fold_builtin_varargs instead. */
10566 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10569 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10571 tree ret = NULL_TREE;
10576 ret = fold_builtin_0 (loc, fndecl, ignore);
10579 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10582 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10585 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10588 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10596 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10597 SET_EXPR_LOCATION (ret, loc);
10598 TREE_NO_WARNING (ret) = 1;
10604 /* Builtins with folding operations that operate on "..." arguments
10605 need special handling; we need to store the arguments in a convenient
10606 data structure before attempting any folding. Fortunately there are
10607 only a few builtins that fall into this category. FNDECL is the
10608 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10609 result of the function call is ignored. */
10612 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10613 bool ignore ATTRIBUTE_UNUSED)
10615 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10616 tree ret = NULL_TREE;
10620 case BUILT_IN_SPRINTF_CHK:
10621 case BUILT_IN_VSPRINTF_CHK:
10622 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10625 case BUILT_IN_SNPRINTF_CHK:
10626 case BUILT_IN_VSNPRINTF_CHK:
10627 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10630 case BUILT_IN_FPCLASSIFY:
10631 ret = fold_builtin_fpclassify (loc, exp);
10639 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10640 SET_EXPR_LOCATION (ret, loc);
10641 TREE_NO_WARNING (ret) = 1;
10647 /* Return true if FNDECL shouldn't be folded right now.
10648 If a built-in function has an inline attribute always_inline
10649 wrapper, defer folding it after always_inline functions have
10650 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10651 might not be performed. */
10654 avoid_folding_inline_builtin (tree fndecl)
10656 return (DECL_DECLARED_INLINE_P (fndecl)
10657 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10659 && !cfun->always_inline_functions_inlined
10660 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10663 /* A wrapper function for builtin folding that prevents warnings for
10664 "statement without effect" and the like, caused by removing the
10665 call node earlier than the warning is generated. */
10668 fold_call_expr (location_t loc, tree exp, bool ignore)
10670 tree ret = NULL_TREE;
10671 tree fndecl = get_callee_fndecl (exp);
10673 && TREE_CODE (fndecl) == FUNCTION_DECL
10674 && DECL_BUILT_IN (fndecl)
10675 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10676 yet. Defer folding until we see all the arguments
10677 (after inlining). */
10678 && !CALL_EXPR_VA_ARG_PACK (exp))
10680 int nargs = call_expr_nargs (exp);
10682 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10683 instead last argument is __builtin_va_arg_pack (). Defer folding
10684 even in that case, until arguments are finalized. */
10685 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10687 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10689 && TREE_CODE (fndecl2) == FUNCTION_DECL
10690 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10691 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10695 if (avoid_folding_inline_builtin (fndecl))
10698 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10699 return targetm.fold_builtin (fndecl, exp, ignore);
10702 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10704 tree *args = CALL_EXPR_ARGP (exp);
10705 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10708 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10716 /* Conveniently construct a function call expression. FNDECL names the
10717 function to be called and ARGLIST is a TREE_LIST of arguments. */
10720 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10722 tree fntype = TREE_TYPE (fndecl);
10723 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10724 int n = list_length (arglist);
10725 tree *argarray = (tree *) alloca (n * sizeof (tree));
10728 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10729 argarray[i] = TREE_VALUE (arglist);
10730 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10733 /* Conveniently construct a function call expression. FNDECL names the
10734 function to be called, N is the number of arguments, and the "..."
10735 parameters are the argument expressions. */
10738 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10741 tree fntype = TREE_TYPE (fndecl);
10742 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10743 tree *argarray = (tree *) alloca (n * sizeof (tree));
10747 for (i = 0; i < n; i++)
10748 argarray[i] = va_arg (ap, tree);
10750 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10753 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10754 N arguments are passed in the array ARGARRAY. */
10757 fold_builtin_call_array (location_t loc, tree type,
10762 tree ret = NULL_TREE;
10766 if (TREE_CODE (fn) == ADDR_EXPR)
10768 tree fndecl = TREE_OPERAND (fn, 0);
10769 if (TREE_CODE (fndecl) == FUNCTION_DECL
10770 && DECL_BUILT_IN (fndecl))
10772 /* If last argument is __builtin_va_arg_pack (), arguments to this
10773 function are not finalized yet. Defer folding until they are. */
10774 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10776 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10778 && TREE_CODE (fndecl2) == FUNCTION_DECL
10779 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10780 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10781 return build_call_array_loc (loc, type, fn, n, argarray);
10783 if (avoid_folding_inline_builtin (fndecl))
10784 return build_call_array_loc (loc, type, fn, n, argarray);
10785 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10787 tree arglist = NULL_TREE;
10788 for (i = n - 1; i >= 0; i--)
10789 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10790 ret = targetm.fold_builtin (fndecl, arglist, false);
10793 return build_call_array_loc (loc, type, fn, n, argarray);
10795 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10797 /* First try the transformations that don't require consing up
10799 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10804 /* If we got this far, we need to build an exp. */
10805 exp = build_call_array_loc (loc, type, fn, n, argarray);
10806 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10807 return ret ? ret : exp;
10811 return build_call_array_loc (loc, type, fn, n, argarray);
10814 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10815 along with N new arguments specified as the "..." parameters. SKIP
10816 is the number of arguments in EXP to be omitted. This function is used
10817 to do varargs-to-varargs transformations. */
10820 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10822 int oldnargs = call_expr_nargs (exp);
10823 int nargs = oldnargs - skip + n;
10824 tree fntype = TREE_TYPE (fndecl);
10825 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10833 buffer = XALLOCAVEC (tree, nargs);
10835 for (i = 0; i < n; i++)
10836 buffer[i] = va_arg (ap, tree);
10838 for (j = skip; j < oldnargs; j++, i++)
10839 buffer[i] = CALL_EXPR_ARG (exp, j);
10842 buffer = CALL_EXPR_ARGP (exp) + skip;
10844 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
10847 /* Validate a single argument ARG against a tree code CODE representing
10851 validate_arg (const_tree arg, enum tree_code code)
10855 else if (code == POINTER_TYPE)
10856 return POINTER_TYPE_P (TREE_TYPE (arg));
10857 else if (code == INTEGER_TYPE)
10858 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10859 return code == TREE_CODE (TREE_TYPE (arg));
10862 /* This function validates the types of a function call argument list
10863 against a specified list of tree_codes. If the last specifier is a 0,
10864 that represents an ellipses, otherwise the last specifier must be a
10867 This is the GIMPLE version of validate_arglist. Eventually we want to
10868 completely convert builtins.c to work from GIMPLEs and the tree based
10869 validate_arglist will then be removed. */
10872 validate_gimple_arglist (const_gimple call, ...)
10874 enum tree_code code;
10880 va_start (ap, call);
10885 code = (enum tree_code) va_arg (ap, int);
10889 /* This signifies an ellipses, any further arguments are all ok. */
10893 /* This signifies an endlink, if no arguments remain, return
10894 true, otherwise return false. */
10895 res = (i == gimple_call_num_args (call));
10898 /* If no parameters remain or the parameter's code does not
10899 match the specified code, return false. Otherwise continue
10900 checking any remaining arguments. */
10901 arg = gimple_call_arg (call, i++);
10902 if (!validate_arg (arg, code))
10909 /* We need gotos here since we can only have one VA_CLOSE in a
10917 /* This function validates the types of a function call argument list
10918 against a specified list of tree_codes. If the last specifier is a 0,
10919 that represents an ellipses, otherwise the last specifier must be a
10923 validate_arglist (const_tree callexpr, ...)
10925 enum tree_code code;
10928 const_call_expr_arg_iterator iter;
10931 va_start (ap, callexpr);
10932 init_const_call_expr_arg_iterator (callexpr, &iter);
10936 code = (enum tree_code) va_arg (ap, int);
10940 /* This signifies an ellipses, any further arguments are all ok. */
10944 /* This signifies an endlink, if no arguments remain, return
10945 true, otherwise return false. */
10946 res = !more_const_call_expr_args_p (&iter);
10949 /* If no parameters remain or the parameter's code does not
10950 match the specified code, return false. Otherwise continue
10951 checking any remaining arguments. */
10952 arg = next_const_call_expr_arg (&iter);
10953 if (!validate_arg (arg, code))
10960 /* We need gotos here since we can only have one VA_CLOSE in a
10968 /* Default target-specific builtin expander that does nothing. */
10971 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10972 rtx target ATTRIBUTE_UNUSED,
10973 rtx subtarget ATTRIBUTE_UNUSED,
10974 enum machine_mode mode ATTRIBUTE_UNUSED,
10975 int ignore ATTRIBUTE_UNUSED)
10980 /* Returns true is EXP represents data that would potentially reside
10981 in a readonly section. */
10984 readonly_data_expr (tree exp)
10988 if (TREE_CODE (exp) != ADDR_EXPR)
10991 exp = get_base_address (TREE_OPERAND (exp, 0));
10995 /* Make sure we call decl_readonly_section only for trees it
10996 can handle (since it returns true for everything it doesn't
10998 if (TREE_CODE (exp) == STRING_CST
10999 || TREE_CODE (exp) == CONSTRUCTOR
11000 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11001 return decl_readonly_section (exp, 0);
11006 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11007 to the call, and TYPE is its return type.
11009 Return NULL_TREE if no simplification was possible, otherwise return the
11010 simplified form of the call as a tree.
11012 The simplified form may be a constant or other expression which
11013 computes the same value, but in a more efficient manner (including
11014 calls to other builtin functions).
11016 The call may contain arguments which need to be evaluated, but
11017 which are not useful to determine the result of the call. In
11018 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11019 COMPOUND_EXPR will be an argument which must be evaluated.
11020 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11021 COMPOUND_EXPR in the chain will contain the tree for the simplified
11022 form of the builtin function call. */
11025 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11027 if (!validate_arg (s1, POINTER_TYPE)
11028 || !validate_arg (s2, POINTER_TYPE))
11033 const char *p1, *p2;
11035 p2 = c_getstr (s2);
11039 p1 = c_getstr (s1);
11042 const char *r = strstr (p1, p2);
11046 return build_int_cst (TREE_TYPE (s1), 0);
11048 /* Return an offset into the constant string argument. */
11049 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11050 s1, size_int (r - p1));
11051 return fold_convert_loc (loc, type, tem);
11054 /* The argument is const char *, and the result is char *, so we need
11055 a type conversion here to avoid a warning. */
11057 return fold_convert_loc (loc, type, s1);
11062 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11066 /* New argument list transforming strstr(s1, s2) to
11067 strchr(s1, s2[0]). */
11068 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11072 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11073 the call, and TYPE is its return type.
11075 Return NULL_TREE if no simplification was possible, otherwise return the
11076 simplified form of the call as a tree.
11078 The simplified form may be a constant or other expression which
11079 computes the same value, but in a more efficient manner (including
11080 calls to other builtin functions).
11082 The call may contain arguments which need to be evaluated, but
11083 which are not useful to determine the result of the call. In
11084 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11085 COMPOUND_EXPR will be an argument which must be evaluated.
11086 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11087 COMPOUND_EXPR in the chain will contain the tree for the simplified
11088 form of the builtin function call. */
11091 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11093 if (!validate_arg (s1, POINTER_TYPE)
11094 || !validate_arg (s2, INTEGER_TYPE))
11100 if (TREE_CODE (s2) != INTEGER_CST)
11103 p1 = c_getstr (s1);
11110 if (target_char_cast (s2, &c))
11113 r = strchr (p1, c);
11116 return build_int_cst (TREE_TYPE (s1), 0);
11118 /* Return an offset into the constant string argument. */
11119 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11120 s1, size_int (r - p1));
11121 return fold_convert_loc (loc, type, tem);
11127 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11128 the call, and TYPE is its return type.
11130 Return NULL_TREE if no simplification was possible, otherwise return the
11131 simplified form of the call as a tree.
11133 The simplified form may be a constant or other expression which
11134 computes the same value, but in a more efficient manner (including
11135 calls to other builtin functions).
11137 The call may contain arguments which need to be evaluated, but
11138 which are not useful to determine the result of the call. In
11139 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11140 COMPOUND_EXPR will be an argument which must be evaluated.
11141 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11142 COMPOUND_EXPR in the chain will contain the tree for the simplified
11143 form of the builtin function call. */
11146 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11148 if (!validate_arg (s1, POINTER_TYPE)
11149 || !validate_arg (s2, INTEGER_TYPE))
11156 if (TREE_CODE (s2) != INTEGER_CST)
11159 p1 = c_getstr (s1);
11166 if (target_char_cast (s2, &c))
11169 r = strrchr (p1, c);
11172 return build_int_cst (TREE_TYPE (s1), 0);
11174 /* Return an offset into the constant string argument. */
11175 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11176 s1, size_int (r - p1));
11177 return fold_convert_loc (loc, type, tem);
11180 if (! integer_zerop (s2))
11183 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11187 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11188 return build_call_expr_loc (loc, fn, 2, s1, s2);
11192 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11193 to the call, and TYPE is its return type.
11195 Return NULL_TREE if no simplification was possible, otherwise return the
11196 simplified form of the call as a tree.
11198 The simplified form may be a constant or other expression which
11199 computes the same value, but in a more efficient manner (including
11200 calls to other builtin functions).
11202 The call may contain arguments which need to be evaluated, but
11203 which are not useful to determine the result of the call. In
11204 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11205 COMPOUND_EXPR will be an argument which must be evaluated.
11206 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11207 COMPOUND_EXPR in the chain will contain the tree for the simplified
11208 form of the builtin function call. */
11211 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11213 if (!validate_arg (s1, POINTER_TYPE)
11214 || !validate_arg (s2, POINTER_TYPE))
11219 const char *p1, *p2;
11221 p2 = c_getstr (s2);
11225 p1 = c_getstr (s1);
11228 const char *r = strpbrk (p1, p2);
11232 return build_int_cst (TREE_TYPE (s1), 0);
11234 /* Return an offset into the constant string argument. */
11235 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11236 s1, size_int (r - p1));
11237 return fold_convert_loc (loc, type, tem);
11241 /* strpbrk(x, "") == NULL.
11242 Evaluate and ignore s1 in case it had side-effects. */
11243 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11246 return NULL_TREE; /* Really call strpbrk. */
11248 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11252 /* New argument list transforming strpbrk(s1, s2) to
11253 strchr(s1, s2[0]). */
11254 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11258 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11261 Return NULL_TREE if no simplification was possible, otherwise return the
11262 simplified form of the call as a tree.
11264 The simplified form may be a constant or other expression which
11265 computes the same value, but in a more efficient manner (including
11266 calls to other builtin functions).
11268 The call may contain arguments which need to be evaluated, but
11269 which are not useful to determine the result of the call. In
11270 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11271 COMPOUND_EXPR will be an argument which must be evaluated.
11272 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11273 COMPOUND_EXPR in the chain will contain the tree for the simplified
11274 form of the builtin function call. */
11277 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11279 if (!validate_arg (dst, POINTER_TYPE)
11280 || !validate_arg (src, POINTER_TYPE))
11284 const char *p = c_getstr (src);
11286 /* If the string length is zero, return the dst parameter. */
11287 if (p && *p == '\0')
11290 if (optimize_insn_for_speed_p ())
11292 /* See if we can store by pieces into (dst + strlen(dst)). */
11294 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11295 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11297 if (!strlen_fn || !strcpy_fn)
11300 /* If we don't have a movstr we don't want to emit an strcpy
11301 call. We have to do that if the length of the source string
11302 isn't computable (in that case we can use memcpy probably
11303 later expanding to a sequence of mov instructions). If we
11304 have movstr instructions we can emit strcpy calls. */
11307 tree len = c_strlen (src, 1);
11308 if (! len || TREE_SIDE_EFFECTS (len))
11312 /* Stabilize the argument list. */
11313 dst = builtin_save_expr (dst);
11315 /* Create strlen (dst). */
11316 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11317 /* Create (dst p+ strlen (dst)). */
11319 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11320 TREE_TYPE (dst), dst, newdst);
11321 newdst = builtin_save_expr (newdst);
11323 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11324 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11330 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11331 arguments to the call.
11333 Return NULL_TREE if no simplification was possible, otherwise return the
11334 simplified form of the call as a tree.
11336 The simplified form may be a constant or other expression which
11337 computes the same value, but in a more efficient manner (including
11338 calls to other builtin functions).
11340 The call may contain arguments which need to be evaluated, but
11341 which are not useful to determine the result of the call. In
11342 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11343 COMPOUND_EXPR will be an argument which must be evaluated.
11344 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11345 COMPOUND_EXPR in the chain will contain the tree for the simplified
11346 form of the builtin function call. */
11349 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11351 if (!validate_arg (dst, POINTER_TYPE)
11352 || !validate_arg (src, POINTER_TYPE)
11353 || !validate_arg (len, INTEGER_TYPE))
11357 const char *p = c_getstr (src);
11359 /* If the requested length is zero, or the src parameter string
11360 length is zero, return the dst parameter. */
11361 if (integer_zerop (len) || (p && *p == '\0'))
11362 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11364 /* If the requested len is greater than or equal to the string
11365 length, call strcat. */
11366 if (TREE_CODE (len) == INTEGER_CST && p
11367 && compare_tree_int (len, strlen (p)) >= 0)
11369 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11371 /* If the replacement _DECL isn't initialized, don't do the
11376 return build_call_expr_loc (loc, fn, 2, dst, src);
11382 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11385 Return NULL_TREE if no simplification was possible, otherwise return the
11386 simplified form of the call as a tree.
11388 The simplified form may be a constant or other expression which
11389 computes the same value, but in a more efficient manner (including
11390 calls to other builtin functions).
11392 The call may contain arguments which need to be evaluated, but
11393 which are not useful to determine the result of the call. In
11394 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11395 COMPOUND_EXPR will be an argument which must be evaluated.
11396 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11397 COMPOUND_EXPR in the chain will contain the tree for the simplified
11398 form of the builtin function call. */
11401 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11403 if (!validate_arg (s1, POINTER_TYPE)
11404 || !validate_arg (s2, POINTER_TYPE))
11408 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11410 /* If both arguments are constants, evaluate at compile-time. */
11413 const size_t r = strspn (p1, p2);
11414 return size_int (r);
11417 /* If either argument is "", return NULL_TREE. */
11418 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11419 /* Evaluate and ignore both arguments in case either one has
11421 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11427 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11430 Return NULL_TREE if no simplification was possible, otherwise return the
11431 simplified form of the call as a tree.
11433 The simplified form may be a constant or other expression which
11434 computes the same value, but in a more efficient manner (including
11435 calls to other builtin functions).
11437 The call may contain arguments which need to be evaluated, but
11438 which are not useful to determine the result of the call. In
11439 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11440 COMPOUND_EXPR will be an argument which must be evaluated.
11441 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11442 COMPOUND_EXPR in the chain will contain the tree for the simplified
11443 form of the builtin function call. */
11446 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11448 if (!validate_arg (s1, POINTER_TYPE)
11449 || !validate_arg (s2, POINTER_TYPE))
11453 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11455 /* If both arguments are constants, evaluate at compile-time. */
11458 const size_t r = strcspn (p1, p2);
11459 return size_int (r);
11462 /* If the first argument is "", return NULL_TREE. */
11463 if (p1 && *p1 == '\0')
11465 /* Evaluate and ignore argument s2 in case it has
11467 return omit_one_operand_loc (loc, size_type_node,
11468 size_zero_node, s2);
11471 /* If the second argument is "", return __builtin_strlen(s1). */
11472 if (p2 && *p2 == '\0')
11474 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11476 /* If the replacement _DECL isn't initialized, don't do the
11481 return build_call_expr_loc (loc, fn, 1, s1);
11487 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11488 to the call. IGNORE is true if the value returned
11489 by the builtin will be ignored. UNLOCKED is true is true if this
11490 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11491 the known length of the string. Return NULL_TREE if no simplification
11495 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11496 bool ignore, bool unlocked, tree len)
11498 /* If we're using an unlocked function, assume the other unlocked
11499 functions exist explicitly. */
11500 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11501 : implicit_built_in_decls[BUILT_IN_FPUTC];
11502 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11503 : implicit_built_in_decls[BUILT_IN_FWRITE];
11505 /* If the return value is used, don't do the transformation. */
11509 /* Verify the arguments in the original call. */
11510 if (!validate_arg (arg0, POINTER_TYPE)
11511 || !validate_arg (arg1, POINTER_TYPE))
11515 len = c_strlen (arg0, 0);
11517 /* Get the length of the string passed to fputs. If the length
11518 can't be determined, punt. */
11520 || TREE_CODE (len) != INTEGER_CST)
11523 switch (compare_tree_int (len, 1))
11525 case -1: /* length is 0, delete the call entirely . */
11526 return omit_one_operand_loc (loc, integer_type_node,
11527 integer_zero_node, arg1);;
11529 case 0: /* length is 1, call fputc. */
11531 const char *p = c_getstr (arg0);
11536 return build_call_expr_loc (loc, fn_fputc, 2,
11537 build_int_cst (NULL_TREE, p[0]), arg1);
11543 case 1: /* length is greater than 1, call fwrite. */
11545 /* If optimizing for size keep fputs. */
11546 if (optimize_function_for_size_p (cfun))
11548 /* New argument list transforming fputs(string, stream) to
11549 fwrite(string, 1, len, stream). */
11551 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11552 size_one_node, len, arg1);
11557 gcc_unreachable ();
11562 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11563 produced. False otherwise. This is done so that we don't output the error
11564 or warning twice or three times. */
11567 fold_builtin_next_arg (tree exp, bool va_start_p)
11569 tree fntype = TREE_TYPE (current_function_decl);
11570 int nargs = call_expr_nargs (exp);
11573 if (TYPE_ARG_TYPES (fntype) == 0
11574 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11575 == void_type_node))
11577 error ("%<va_start%> used in function with fixed args");
11583 if (va_start_p && (nargs != 2))
11585 error ("wrong number of arguments to function %<va_start%>");
11588 arg = CALL_EXPR_ARG (exp, 1);
11590 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11591 when we checked the arguments and if needed issued a warning. */
11596 /* Evidently an out of date version of <stdarg.h>; can't validate
11597 va_start's second argument, but can still work as intended. */
11598 warning (0, "%<__builtin_next_arg%> called without an argument");
11601 else if (nargs > 1)
11603 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11606 arg = CALL_EXPR_ARG (exp, 0);
11609 if (TREE_CODE (arg) == SSA_NAME)
11610 arg = SSA_NAME_VAR (arg);
11612 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11613 or __builtin_next_arg (0) the first time we see it, after checking
11614 the arguments and if needed issuing a warning. */
11615 if (!integer_zerop (arg))
11617 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11619 /* Strip off all nops for the sake of the comparison. This
11620 is not quite the same as STRIP_NOPS. It does more.
11621 We must also strip off INDIRECT_EXPR for C++ reference
11623 while (CONVERT_EXPR_P (arg)
11624 || TREE_CODE (arg) == INDIRECT_REF)
11625 arg = TREE_OPERAND (arg, 0);
11626 if (arg != last_parm)
11628 /* FIXME: Sometimes with the tree optimizers we can get the
11629 not the last argument even though the user used the last
11630 argument. We just warn and set the arg to be the last
11631 argument so that we will get wrong-code because of
11633 warning (0, "second parameter of %<va_start%> not last named argument");
11636 /* Undefined by C99 7.15.1.4p4 (va_start):
11637 "If the parameter parmN is declared with the register storage
11638 class, with a function or array type, or with a type that is
11639 not compatible with the type that results after application of
11640 the default argument promotions, the behavior is undefined."
11642 else if (DECL_REGISTER (arg))
11643 warning (0, "undefined behaviour when second parameter of "
11644 "%<va_start%> is declared with %<register%> storage");
11646 /* We want to verify the second parameter just once before the tree
11647 optimizers are run and then avoid keeping it in the tree,
11648 as otherwise we could warn even for correct code like:
11649 void foo (int i, ...)
11650 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11652 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11654 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11660 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11661 ORIG may be null if this is a 2-argument call. We don't attempt to
11662 simplify calls with more than 3 arguments.
11664 Return NULL_TREE if no simplification was possible, otherwise return the
11665 simplified form of the call as a tree. If IGNORED is true, it means that
11666 the caller does not use the returned value of the function. */
11669 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11670 tree orig, int ignored)
11673 const char *fmt_str = NULL;
11675 /* Verify the required arguments in the original call. We deal with two
11676 types of sprintf() calls: 'sprintf (str, fmt)' and
11677 'sprintf (dest, "%s", orig)'. */
11678 if (!validate_arg (dest, POINTER_TYPE)
11679 || !validate_arg (fmt, POINTER_TYPE))
11681 if (orig && !validate_arg (orig, POINTER_TYPE))
11684 /* Check whether the format is a literal string constant. */
11685 fmt_str = c_getstr (fmt);
11686 if (fmt_str == NULL)
11690 retval = NULL_TREE;
11692 if (!init_target_chars ())
11695 /* If the format doesn't contain % args or %%, use strcpy. */
11696 if (strchr (fmt_str, target_percent) == NULL)
11698 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11703 /* Don't optimize sprintf (buf, "abc", ptr++). */
11707 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11708 'format' is known to contain no % formats. */
11709 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11711 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11714 /* If the format is "%s", use strcpy if the result isn't used. */
11715 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11718 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11723 /* Don't crash on sprintf (str1, "%s"). */
11727 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11730 retval = c_strlen (orig, 1);
11731 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11734 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11737 if (call && retval)
11739 retval = fold_convert_loc
11740 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11742 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11748 /* Expand a call EXP to __builtin_object_size. */
11751 expand_builtin_object_size (tree exp)
11754 int object_size_type;
11755 tree fndecl = get_callee_fndecl (exp);
11757 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11759 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11761 expand_builtin_trap ();
11765 ost = CALL_EXPR_ARG (exp, 1);
11768 if (TREE_CODE (ost) != INTEGER_CST
11769 || tree_int_cst_sgn (ost) < 0
11770 || compare_tree_int (ost, 3) > 0)
11772 error ("%Klast argument of %D is not integer constant between 0 and 3",
11774 expand_builtin_trap ();
11778 object_size_type = tree_low_cst (ost, 0);
11780 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11783 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11784 FCODE is the BUILT_IN_* to use.
11785 Return NULL_RTX if we failed; the caller should emit a normal call,
11786 otherwise try to get the result in TARGET, if convenient (and in
11787 mode MODE if that's convenient). */
11790 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11791 enum built_in_function fcode)
11793 tree dest, src, len, size;
11795 if (!validate_arglist (exp,
11797 fcode == BUILT_IN_MEMSET_CHK
11798 ? INTEGER_TYPE : POINTER_TYPE,
11799 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11802 dest = CALL_EXPR_ARG (exp, 0);
11803 src = CALL_EXPR_ARG (exp, 1);
11804 len = CALL_EXPR_ARG (exp, 2);
11805 size = CALL_EXPR_ARG (exp, 3);
11807 if (! host_integerp (size, 1))
11810 if (host_integerp (len, 1) || integer_all_onesp (size))
11814 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11816 warning_at (tree_nonartificial_location (exp),
11817 0, "%Kcall to %D will always overflow destination buffer",
11818 exp, get_callee_fndecl (exp));
11823 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11824 mem{cpy,pcpy,move,set} is available. */
11827 case BUILT_IN_MEMCPY_CHK:
11828 fn = built_in_decls[BUILT_IN_MEMCPY];
11830 case BUILT_IN_MEMPCPY_CHK:
11831 fn = built_in_decls[BUILT_IN_MEMPCPY];
11833 case BUILT_IN_MEMMOVE_CHK:
11834 fn = built_in_decls[BUILT_IN_MEMMOVE];
11836 case BUILT_IN_MEMSET_CHK:
11837 fn = built_in_decls[BUILT_IN_MEMSET];
11846 fn = build_call_nofold (fn, 3, dest, src, len);
11847 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11848 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11849 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11851 else if (fcode == BUILT_IN_MEMSET_CHK)
11855 unsigned int dest_align
11856 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11858 /* If DEST is not a pointer type, call the normal function. */
11859 if (dest_align == 0)
11862 /* If SRC and DEST are the same (and not volatile), do nothing. */
11863 if (operand_equal_p (src, dest, 0))
11867 if (fcode != BUILT_IN_MEMPCPY_CHK)
11869 /* Evaluate and ignore LEN in case it has side-effects. */
11870 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11871 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11874 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11875 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11878 /* __memmove_chk special case. */
11879 if (fcode == BUILT_IN_MEMMOVE_CHK)
11881 unsigned int src_align
11882 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11884 if (src_align == 0)
11887 /* If src is categorized for a readonly section we can use
11888 normal __memcpy_chk. */
11889 if (readonly_data_expr (src))
11891 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11894 fn = build_call_nofold (fn, 4, dest, src, len, size);
11895 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11896 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11897 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11904 /* Emit warning if a buffer overflow is detected at compile time. */
11907 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11911 location_t loc = tree_nonartificial_location (exp);
11915 case BUILT_IN_STRCPY_CHK:
11916 case BUILT_IN_STPCPY_CHK:
11917 /* For __strcat_chk the warning will be emitted only if overflowing
11918 by at least strlen (dest) + 1 bytes. */
11919 case BUILT_IN_STRCAT_CHK:
11920 len = CALL_EXPR_ARG (exp, 1);
11921 size = CALL_EXPR_ARG (exp, 2);
11924 case BUILT_IN_STRNCAT_CHK:
11925 case BUILT_IN_STRNCPY_CHK:
11926 len = CALL_EXPR_ARG (exp, 2);
11927 size = CALL_EXPR_ARG (exp, 3);
11929 case BUILT_IN_SNPRINTF_CHK:
11930 case BUILT_IN_VSNPRINTF_CHK:
11931 len = CALL_EXPR_ARG (exp, 1);
11932 size = CALL_EXPR_ARG (exp, 3);
11935 gcc_unreachable ();
11941 if (! host_integerp (size, 1) || integer_all_onesp (size))
11946 len = c_strlen (len, 1);
11947 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11950 else if (fcode == BUILT_IN_STRNCAT_CHK)
11952 tree src = CALL_EXPR_ARG (exp, 1);
11953 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11955 src = c_strlen (src, 1);
11956 if (! src || ! host_integerp (src, 1))
11958 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11959 exp, get_callee_fndecl (exp));
11962 else if (tree_int_cst_lt (src, size))
11965 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11968 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11969 exp, get_callee_fndecl (exp));
11972 /* Emit warning if a buffer overflow is detected at compile time
11973 in __sprintf_chk/__vsprintf_chk calls. */
11976 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11978 tree size, len, fmt;
11979 const char *fmt_str;
11980 int nargs = call_expr_nargs (exp);
11982 /* Verify the required arguments in the original call. */
11986 size = CALL_EXPR_ARG (exp, 2);
11987 fmt = CALL_EXPR_ARG (exp, 3);
11989 if (! host_integerp (size, 1) || integer_all_onesp (size))
11992 /* Check whether the format is a literal string constant. */
11993 fmt_str = c_getstr (fmt);
11994 if (fmt_str == NULL)
11997 if (!init_target_chars ())
12000 /* If the format doesn't contain % args or %%, we know its size. */
12001 if (strchr (fmt_str, target_percent) == 0)
12002 len = build_int_cstu (size_type_node, strlen (fmt_str));
12003 /* If the format is "%s" and first ... argument is a string literal,
12005 else if (fcode == BUILT_IN_SPRINTF_CHK
12006 && strcmp (fmt_str, target_percent_s) == 0)
12012 arg = CALL_EXPR_ARG (exp, 4);
12013 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12016 len = c_strlen (arg, 1);
12017 if (!len || ! host_integerp (len, 1))
12023 if (! tree_int_cst_lt (len, size))
12024 warning_at (tree_nonartificial_location (exp),
12025 0, "%Kcall to %D will always overflow destination buffer",
12026 exp, get_callee_fndecl (exp));
12029 /* Emit warning if a free is called with address of a variable. */
12032 maybe_emit_free_warning (tree exp)
12034 tree arg = CALL_EXPR_ARG (exp, 0);
12037 if (TREE_CODE (arg) != ADDR_EXPR)
12040 arg = get_base_address (TREE_OPERAND (arg, 0));
12041 if (arg == NULL || INDIRECT_REF_P (arg))
12044 if (SSA_VAR_P (arg))
12045 warning_at (tree_nonartificial_location (exp),
12046 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12048 warning_at (tree_nonartificial_location (exp),
12049 0, "%Kattempt to free a non-heap object", exp);
12052 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12056 fold_builtin_object_size (tree ptr, tree ost)
12058 tree ret = NULL_TREE;
12059 int object_size_type;
12061 if (!validate_arg (ptr, POINTER_TYPE)
12062 || !validate_arg (ost, INTEGER_TYPE))
12067 if (TREE_CODE (ost) != INTEGER_CST
12068 || tree_int_cst_sgn (ost) < 0
12069 || compare_tree_int (ost, 3) > 0)
12072 object_size_type = tree_low_cst (ost, 0);
12074 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12075 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12076 and (size_t) 0 for types 2 and 3. */
12077 if (TREE_SIDE_EFFECTS (ptr))
12078 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12080 if (TREE_CODE (ptr) == ADDR_EXPR)
12081 ret = build_int_cstu (size_type_node,
12082 compute_builtin_object_size (ptr, object_size_type));
12084 else if (TREE_CODE (ptr) == SSA_NAME)
12086 unsigned HOST_WIDE_INT bytes;
12088 /* If object size is not known yet, delay folding until
12089 later. Maybe subsequent passes will help determining
12091 bytes = compute_builtin_object_size (ptr, object_size_type);
12092 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12094 ret = build_int_cstu (size_type_node, bytes);
12099 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12100 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12101 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12108 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12109 DEST, SRC, LEN, and SIZE are the arguments to the call.
12110 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12111 code of the builtin. If MAXLEN is not NULL, it is maximum length
12112 passed as third argument. */
12115 fold_builtin_memory_chk (location_t loc, tree fndecl,
12116 tree dest, tree src, tree len, tree size,
12117 tree maxlen, bool ignore,
12118 enum built_in_function fcode)
12122 if (!validate_arg (dest, POINTER_TYPE)
12123 || !validate_arg (src,
12124 (fcode == BUILT_IN_MEMSET_CHK
12125 ? INTEGER_TYPE : POINTER_TYPE))
12126 || !validate_arg (len, INTEGER_TYPE)
12127 || !validate_arg (size, INTEGER_TYPE))
12130 /* If SRC and DEST are the same (and not volatile), return DEST
12131 (resp. DEST+LEN for __mempcpy_chk). */
12132 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12134 if (fcode != BUILT_IN_MEMPCPY_CHK)
12135 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12139 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12141 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12145 if (! host_integerp (size, 1))
12148 if (! integer_all_onesp (size))
12150 if (! host_integerp (len, 1))
12152 /* If LEN is not constant, try MAXLEN too.
12153 For MAXLEN only allow optimizing into non-_ocs function
12154 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12155 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12157 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12159 /* (void) __mempcpy_chk () can be optimized into
12160 (void) __memcpy_chk (). */
12161 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12165 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12173 if (tree_int_cst_lt (size, maxlen))
12178 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12179 mem{cpy,pcpy,move,set} is available. */
12182 case BUILT_IN_MEMCPY_CHK:
12183 fn = built_in_decls[BUILT_IN_MEMCPY];
12185 case BUILT_IN_MEMPCPY_CHK:
12186 fn = built_in_decls[BUILT_IN_MEMPCPY];
12188 case BUILT_IN_MEMMOVE_CHK:
12189 fn = built_in_decls[BUILT_IN_MEMMOVE];
12191 case BUILT_IN_MEMSET_CHK:
12192 fn = built_in_decls[BUILT_IN_MEMSET];
12201 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12204 /* Fold a call to the __st[rp]cpy_chk builtin.
12205 DEST, SRC, and SIZE are the arguments to the call.
12206 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12207 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12208 strings passed as second argument. */
12211 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12212 tree src, tree size,
12213 tree maxlen, bool ignore,
12214 enum built_in_function fcode)
12218 if (!validate_arg (dest, POINTER_TYPE)
12219 || !validate_arg (src, POINTER_TYPE)
12220 || !validate_arg (size, INTEGER_TYPE))
12223 /* If SRC and DEST are the same (and not volatile), return DEST. */
12224 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12225 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12227 if (! host_integerp (size, 1))
12230 if (! integer_all_onesp (size))
12232 len = c_strlen (src, 1);
12233 if (! len || ! host_integerp (len, 1))
12235 /* If LEN is not constant, try MAXLEN too.
12236 For MAXLEN only allow optimizing into non-_ocs function
12237 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12238 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12240 if (fcode == BUILT_IN_STPCPY_CHK)
12245 /* If return value of __stpcpy_chk is ignored,
12246 optimize into __strcpy_chk. */
12247 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12251 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12254 if (! len || TREE_SIDE_EFFECTS (len))
12257 /* If c_strlen returned something, but not a constant,
12258 transform __strcpy_chk into __memcpy_chk. */
12259 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12263 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12264 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12265 build_call_expr_loc (loc, fn, 4,
12266 dest, src, len, size));
12272 if (! tree_int_cst_lt (maxlen, size))
12276 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12277 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12278 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12282 return build_call_expr_loc (loc, fn, 2, dest, src);
12285 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12286 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12287 length passed as third argument. */
12290 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12291 tree len, tree size, tree maxlen)
12295 if (!validate_arg (dest, POINTER_TYPE)
12296 || !validate_arg (src, POINTER_TYPE)
12297 || !validate_arg (len, INTEGER_TYPE)
12298 || !validate_arg (size, INTEGER_TYPE))
12301 if (! host_integerp (size, 1))
12304 if (! integer_all_onesp (size))
12306 if (! host_integerp (len, 1))
12308 /* If LEN is not constant, try MAXLEN too.
12309 For MAXLEN only allow optimizing into non-_ocs function
12310 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12311 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12317 if (tree_int_cst_lt (size, maxlen))
12321 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12322 fn = built_in_decls[BUILT_IN_STRNCPY];
12326 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12329 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12330 are the arguments to the call. */
12333 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12334 tree src, tree size)
12339 if (!validate_arg (dest, POINTER_TYPE)
12340 || !validate_arg (src, POINTER_TYPE)
12341 || !validate_arg (size, INTEGER_TYPE))
12344 p = c_getstr (src);
12345 /* If the SRC parameter is "", return DEST. */
12346 if (p && *p == '\0')
12347 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12349 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12352 /* If __builtin_strcat_chk is used, assume strcat is available. */
12353 fn = built_in_decls[BUILT_IN_STRCAT];
12357 return build_call_expr_loc (loc, fn, 2, dest, src);
12360 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12364 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12365 tree dest, tree src, tree len, tree size)
12370 if (!validate_arg (dest, POINTER_TYPE)
12371 || !validate_arg (src, POINTER_TYPE)
12372 || !validate_arg (size, INTEGER_TYPE)
12373 || !validate_arg (size, INTEGER_TYPE))
12376 p = c_getstr (src);
12377 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12378 if (p && *p == '\0')
12379 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12380 else if (integer_zerop (len))
12381 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12383 if (! host_integerp (size, 1))
12386 if (! integer_all_onesp (size))
12388 tree src_len = c_strlen (src, 1);
12390 && host_integerp (src_len, 1)
12391 && host_integerp (len, 1)
12392 && ! tree_int_cst_lt (len, src_len))
12394 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12395 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12399 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12404 /* If __builtin_strncat_chk is used, assume strncat is available. */
12405 fn = built_in_decls[BUILT_IN_STRNCAT];
12409 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12412 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12413 a normal call should be emitted rather than expanding the function
12414 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12417 fold_builtin_sprintf_chk (location_t loc, tree exp,
12418 enum built_in_function fcode)
12420 tree dest, size, len, fn, fmt, flag;
12421 const char *fmt_str;
12422 int nargs = call_expr_nargs (exp);
12424 /* Verify the required arguments in the original call. */
12427 dest = CALL_EXPR_ARG (exp, 0);
12428 if (!validate_arg (dest, POINTER_TYPE))
12430 flag = CALL_EXPR_ARG (exp, 1);
12431 if (!validate_arg (flag, INTEGER_TYPE))
12433 size = CALL_EXPR_ARG (exp, 2);
12434 if (!validate_arg (size, INTEGER_TYPE))
12436 fmt = CALL_EXPR_ARG (exp, 3);
12437 if (!validate_arg (fmt, POINTER_TYPE))
12440 if (! host_integerp (size, 1))
12445 if (!init_target_chars ())
12448 /* Check whether the format is a literal string constant. */
12449 fmt_str = c_getstr (fmt);
12450 if (fmt_str != NULL)
12452 /* If the format doesn't contain % args or %%, we know the size. */
12453 if (strchr (fmt_str, target_percent) == 0)
12455 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12456 len = build_int_cstu (size_type_node, strlen (fmt_str));
12458 /* If the format is "%s" and first ... argument is a string literal,
12459 we know the size too. */
12460 else if (fcode == BUILT_IN_SPRINTF_CHK
12461 && strcmp (fmt_str, target_percent_s) == 0)
12467 arg = CALL_EXPR_ARG (exp, 4);
12468 if (validate_arg (arg, POINTER_TYPE))
12470 len = c_strlen (arg, 1);
12471 if (! len || ! host_integerp (len, 1))
12478 if (! integer_all_onesp (size))
12480 if (! len || ! tree_int_cst_lt (len, size))
12484 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12485 or if format doesn't contain % chars or is "%s". */
12486 if (! integer_zerop (flag))
12488 if (fmt_str == NULL)
12490 if (strchr (fmt_str, target_percent) != NULL
12491 && strcmp (fmt_str, target_percent_s))
12495 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12496 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12497 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12501 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12504 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12505 a normal call should be emitted rather than expanding the function
12506 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12507 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12508 passed as second argument. */
12511 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12512 enum built_in_function fcode)
12514 tree dest, size, len, fn, fmt, flag;
12515 const char *fmt_str;
12517 /* Verify the required arguments in the original call. */
12518 if (call_expr_nargs (exp) < 5)
12520 dest = CALL_EXPR_ARG (exp, 0);
12521 if (!validate_arg (dest, POINTER_TYPE))
12523 len = CALL_EXPR_ARG (exp, 1);
12524 if (!validate_arg (len, INTEGER_TYPE))
12526 flag = CALL_EXPR_ARG (exp, 2);
12527 if (!validate_arg (flag, INTEGER_TYPE))
12529 size = CALL_EXPR_ARG (exp, 3);
12530 if (!validate_arg (size, INTEGER_TYPE))
12532 fmt = CALL_EXPR_ARG (exp, 4);
12533 if (!validate_arg (fmt, POINTER_TYPE))
12536 if (! host_integerp (size, 1))
12539 if (! integer_all_onesp (size))
12541 if (! host_integerp (len, 1))
12543 /* If LEN is not constant, try MAXLEN too.
12544 For MAXLEN only allow optimizing into non-_ocs function
12545 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12546 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12552 if (tree_int_cst_lt (size, maxlen))
12556 if (!init_target_chars ())
12559 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12560 or if format doesn't contain % chars or is "%s". */
12561 if (! integer_zerop (flag))
12563 fmt_str = c_getstr (fmt);
12564 if (fmt_str == NULL)
12566 if (strchr (fmt_str, target_percent) != NULL
12567 && strcmp (fmt_str, target_percent_s))
12571 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12573 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12574 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12578 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12581 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12582 FMT and ARG are the arguments to the call; we don't fold cases with
12583 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12585 Return NULL_TREE if no simplification was possible, otherwise return the
12586 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12587 code of the function to be simplified. */
12590 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12591 tree arg, bool ignore,
12592 enum built_in_function fcode)
12594 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12595 const char *fmt_str = NULL;
12597 /* If the return value is used, don't do the transformation. */
12601 /* Verify the required arguments in the original call. */
12602 if (!validate_arg (fmt, POINTER_TYPE))
12605 /* Check whether the format is a literal string constant. */
12606 fmt_str = c_getstr (fmt);
12607 if (fmt_str == NULL)
12610 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12612 /* If we're using an unlocked function, assume the other
12613 unlocked functions exist explicitly. */
12614 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12615 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12619 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12620 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12623 if (!init_target_chars ())
12626 if (strcmp (fmt_str, target_percent_s) == 0
12627 || strchr (fmt_str, target_percent) == NULL)
12631 if (strcmp (fmt_str, target_percent_s) == 0)
12633 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12636 if (!arg || !validate_arg (arg, POINTER_TYPE))
12639 str = c_getstr (arg);
12645 /* The format specifier doesn't contain any '%' characters. */
12646 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12652 /* If the string was "", printf does nothing. */
12653 if (str[0] == '\0')
12654 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12656 /* If the string has length of 1, call putchar. */
12657 if (str[1] == '\0')
12659 /* Given printf("c"), (where c is any one character,)
12660 convert "c"[0] to an int and pass that to the replacement
12662 newarg = build_int_cst (NULL_TREE, str[0]);
12664 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12668 /* If the string was "string\n", call puts("string"). */
12669 size_t len = strlen (str);
12670 if ((unsigned char)str[len - 1] == target_newline)
12672 /* Create a NUL-terminated string that's one char shorter
12673 than the original, stripping off the trailing '\n'. */
12674 char *newstr = XALLOCAVEC (char, len);
12675 memcpy (newstr, str, len - 1);
12676 newstr[len - 1] = 0;
12678 newarg = build_string_literal (len, newstr);
12680 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12683 /* We'd like to arrange to call fputs(string,stdout) here,
12684 but we need stdout and don't have a way to get it yet. */
12689 /* The other optimizations can be done only on the non-va_list variants. */
12690 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12693 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12694 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12696 if (!arg || !validate_arg (arg, POINTER_TYPE))
12699 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12702 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12703 else if (strcmp (fmt_str, target_percent_c) == 0)
12705 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12708 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12714 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12717 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12718 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12719 more than 3 arguments, and ARG may be null in the 2-argument case.
12721 Return NULL_TREE if no simplification was possible, otherwise return the
12722 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12723 code of the function to be simplified. */
12726 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12727 tree fmt, tree arg, bool ignore,
12728 enum built_in_function fcode)
12730 tree fn_fputc, fn_fputs, call = NULL_TREE;
12731 const char *fmt_str = NULL;
12733 /* If the return value is used, don't do the transformation. */
12737 /* Verify the required arguments in the original call. */
12738 if (!validate_arg (fp, POINTER_TYPE))
12740 if (!validate_arg (fmt, POINTER_TYPE))
12743 /* Check whether the format is a literal string constant. */
12744 fmt_str = c_getstr (fmt);
12745 if (fmt_str == NULL)
12748 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12750 /* If we're using an unlocked function, assume the other
12751 unlocked functions exist explicitly. */
12752 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12753 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12757 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12758 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12761 if (!init_target_chars ())
12764 /* If the format doesn't contain % args or %%, use strcpy. */
12765 if (strchr (fmt_str, target_percent) == NULL)
12767 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12771 /* If the format specifier was "", fprintf does nothing. */
12772 if (fmt_str[0] == '\0')
12774 /* If FP has side-effects, just wait until gimplification is
12776 if (TREE_SIDE_EFFECTS (fp))
12779 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12782 /* When "string" doesn't contain %, replace all cases of
12783 fprintf (fp, string) with fputs (string, fp). The fputs
12784 builtin will take care of special cases like length == 1. */
12786 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12789 /* The other optimizations can be done only on the non-va_list variants. */
12790 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12793 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12794 else if (strcmp (fmt_str, target_percent_s) == 0)
12796 if (!arg || !validate_arg (arg, POINTER_TYPE))
12799 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12802 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12803 else if (strcmp (fmt_str, target_percent_c) == 0)
12805 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12808 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12813 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12816 /* Initialize format string characters in the target charset. */
12819 init_target_chars (void)
12824 target_newline = lang_hooks.to_target_charset ('\n');
12825 target_percent = lang_hooks.to_target_charset ('%');
12826 target_c = lang_hooks.to_target_charset ('c');
12827 target_s = lang_hooks.to_target_charset ('s');
12828 if (target_newline == 0 || target_percent == 0 || target_c == 0
12832 target_percent_c[0] = target_percent;
12833 target_percent_c[1] = target_c;
12834 target_percent_c[2] = '\0';
12836 target_percent_s[0] = target_percent;
12837 target_percent_s[1] = target_s;
12838 target_percent_s[2] = '\0';
12840 target_percent_s_newline[0] = target_percent;
12841 target_percent_s_newline[1] = target_s;
12842 target_percent_s_newline[2] = target_newline;
12843 target_percent_s_newline[3] = '\0';
12850 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12851 and no overflow/underflow occurred. INEXACT is true if M was not
12852 exactly calculated. TYPE is the tree type for the result. This
12853 function assumes that you cleared the MPFR flags and then
12854 calculated M to see if anything subsequently set a flag prior to
12855 entering this function. Return NULL_TREE if any checks fail. */
12858 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12860 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12861 overflow/underflow occurred. If -frounding-math, proceed iff the
12862 result of calling FUNC was exact. */
12863 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12864 && (!flag_rounding_math || !inexact))
12866 REAL_VALUE_TYPE rr;
12868 real_from_mpfr (&rr, m, type, GMP_RNDN);
12869 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12870 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12871 but the mpft_t is not, then we underflowed in the
12873 if (real_isfinite (&rr)
12874 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12876 REAL_VALUE_TYPE rmode;
12878 real_convert (&rmode, TYPE_MODE (type), &rr);
12879 /* Proceed iff the specified mode can hold the value. */
12880 if (real_identical (&rmode, &rr))
12881 return build_real (type, rmode);
12887 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12888 number and no overflow/underflow occurred. INEXACT is true if M
12889 was not exactly calculated. TYPE is the tree type for the result.
12890 This function assumes that you cleared the MPFR flags and then
12891 calculated M to see if anything subsequently set a flag prior to
12892 entering this function. Return NULL_TREE if any checks fail, if
12893 FORCE_CONVERT is true, then bypass the checks. */
12896 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12898 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12899 overflow/underflow occurred. If -frounding-math, proceed iff the
12900 result of calling FUNC was exact. */
12902 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12903 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12904 && (!flag_rounding_math || !inexact)))
12906 REAL_VALUE_TYPE re, im;
12908 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12909 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12910 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12911 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12912 but the mpft_t is not, then we underflowed in the
12915 || (real_isfinite (&re) && real_isfinite (&im)
12916 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12917 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12919 REAL_VALUE_TYPE re_mode, im_mode;
12921 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12922 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12923 /* Proceed iff the specified mode can hold the value. */
12925 || (real_identical (&re_mode, &re)
12926 && real_identical (&im_mode, &im)))
12927 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12928 build_real (TREE_TYPE (type), im_mode));
12934 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12935 FUNC on it and return the resulting value as a tree with type TYPE.
12936 If MIN and/or MAX are not NULL, then the supplied ARG must be
12937 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12938 acceptable values, otherwise they are not. The mpfr precision is
12939 set to the precision of TYPE. We assume that function FUNC returns
12940 zero if the result could be calculated exactly within the requested
12944 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12945 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12948 tree result = NULL_TREE;
12952 /* To proceed, MPFR must exactly represent the target floating point
12953 format, which only happens when the target base equals two. */
12954 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12955 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12957 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12959 if (real_isfinite (ra)
12960 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12961 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12963 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12964 const int prec = fmt->p;
12965 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12969 mpfr_init2 (m, prec);
12970 mpfr_from_real (m, ra, GMP_RNDN);
12971 mpfr_clear_flags ();
12972 inexact = func (m, m, rnd);
12973 result = do_mpfr_ckconv (m, type, inexact);
12981 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12982 FUNC on it and return the resulting value as a tree with type TYPE.
12983 The mpfr precision is set to the precision of TYPE. We assume that
12984 function FUNC returns zero if the result could be calculated
12985 exactly within the requested precision. */
12988 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12989 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12991 tree result = NULL_TREE;
12996 /* To proceed, MPFR must exactly represent the target floating point
12997 format, which only happens when the target base equals two. */
12998 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12999 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13000 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13002 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13003 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13005 if (real_isfinite (ra1) && real_isfinite (ra2))
13007 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13008 const int prec = fmt->p;
13009 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13013 mpfr_inits2 (prec, m1, m2, NULL);
13014 mpfr_from_real (m1, ra1, GMP_RNDN);
13015 mpfr_from_real (m2, ra2, GMP_RNDN);
13016 mpfr_clear_flags ();
13017 inexact = func (m1, m1, m2, rnd);
13018 result = do_mpfr_ckconv (m1, type, inexact);
13019 mpfr_clears (m1, m2, NULL);
13026 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13027 FUNC on it and return the resulting value as a tree with type TYPE.
13028 The mpfr precision is set to the precision of TYPE. We assume that
13029 function FUNC returns zero if the result could be calculated
13030 exactly within the requested precision. */
13033 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13034 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13036 tree result = NULL_TREE;
13042 /* To proceed, MPFR must exactly represent the target floating point
13043 format, which only happens when the target base equals two. */
13044 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13045 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13046 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13047 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13049 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13050 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13051 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13053 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13055 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13056 const int prec = fmt->p;
13057 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13061 mpfr_inits2 (prec, m1, m2, m3, NULL);
13062 mpfr_from_real (m1, ra1, GMP_RNDN);
13063 mpfr_from_real (m2, ra2, GMP_RNDN);
13064 mpfr_from_real (m3, ra3, GMP_RNDN);
13065 mpfr_clear_flags ();
13066 inexact = func (m1, m1, m2, m3, rnd);
13067 result = do_mpfr_ckconv (m1, type, inexact);
13068 mpfr_clears (m1, m2, m3, NULL);
13075 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13076 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13077 If ARG_SINP and ARG_COSP are NULL then the result is returned
13078 as a complex value.
13079 The type is taken from the type of ARG and is used for setting the
13080 precision of the calculation and results. */
13083 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13085 tree const type = TREE_TYPE (arg);
13086 tree result = NULL_TREE;
13090 /* To proceed, MPFR must exactly represent the target floating point
13091 format, which only happens when the target base equals two. */
13092 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13093 && TREE_CODE (arg) == REAL_CST
13094 && !TREE_OVERFLOW (arg))
13096 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13098 if (real_isfinite (ra))
13100 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13101 const int prec = fmt->p;
13102 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13103 tree result_s, result_c;
13107 mpfr_inits2 (prec, m, ms, mc, NULL);
13108 mpfr_from_real (m, ra, GMP_RNDN);
13109 mpfr_clear_flags ();
13110 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13111 result_s = do_mpfr_ckconv (ms, type, inexact);
13112 result_c = do_mpfr_ckconv (mc, type, inexact);
13113 mpfr_clears (m, ms, mc, NULL);
13114 if (result_s && result_c)
13116 /* If we are to return in a complex value do so. */
13117 if (!arg_sinp && !arg_cosp)
13118 return build_complex (build_complex_type (type),
13119 result_c, result_s);
13121 /* Dereference the sin/cos pointer arguments. */
13122 arg_sinp = build_fold_indirect_ref (arg_sinp);
13123 arg_cosp = build_fold_indirect_ref (arg_cosp);
13124 /* Proceed if valid pointer type were passed in. */
13125 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13126 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13128 /* Set the values. */
13129 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13131 TREE_SIDE_EFFECTS (result_s) = 1;
13132 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13134 TREE_SIDE_EFFECTS (result_c) = 1;
13135 /* Combine the assignments into a compound expr. */
13136 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13137 result_s, result_c));
13145 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13146 two-argument mpfr order N Bessel function FUNC on them and return
13147 the resulting value as a tree with type TYPE. The mpfr precision
13148 is set to the precision of TYPE. We assume that function FUNC
13149 returns zero if the result could be calculated exactly within the
13150 requested precision. */
13152 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13153 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13154 const REAL_VALUE_TYPE *min, bool inclusive)
13156 tree result = NULL_TREE;
13161 /* To proceed, MPFR must exactly represent the target floating point
13162 format, which only happens when the target base equals two. */
13163 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13164 && host_integerp (arg1, 0)
13165 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13167 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13168 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13171 && real_isfinite (ra)
13172 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13174 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13175 const int prec = fmt->p;
13176 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13180 mpfr_init2 (m, prec);
13181 mpfr_from_real (m, ra, GMP_RNDN);
13182 mpfr_clear_flags ();
13183 inexact = func (m, n, m, rnd);
13184 result = do_mpfr_ckconv (m, type, inexact);
13192 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13193 the pointer *(ARG_QUO) and return the result. The type is taken
13194 from the type of ARG0 and is used for setting the precision of the
13195 calculation and results. */
13198 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13200 tree const type = TREE_TYPE (arg0);
13201 tree result = NULL_TREE;
13206 /* To proceed, MPFR must exactly represent the target floating point
13207 format, which only happens when the target base equals two. */
13208 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13209 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13210 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13212 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13213 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13215 if (real_isfinite (ra0) && real_isfinite (ra1))
13217 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13218 const int prec = fmt->p;
13219 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13224 mpfr_inits2 (prec, m0, m1, NULL);
13225 mpfr_from_real (m0, ra0, GMP_RNDN);
13226 mpfr_from_real (m1, ra1, GMP_RNDN);
13227 mpfr_clear_flags ();
13228 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13229 /* Remquo is independent of the rounding mode, so pass
13230 inexact=0 to do_mpfr_ckconv(). */
13231 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13232 mpfr_clears (m0, m1, NULL);
13235 /* MPFR calculates quo in the host's long so it may
13236 return more bits in quo than the target int can hold
13237 if sizeof(host long) > sizeof(target int). This can
13238 happen even for native compilers in LP64 mode. In
13239 these cases, modulo the quo value with the largest
13240 number that the target int can hold while leaving one
13241 bit for the sign. */
13242 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13243 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13245 /* Dereference the quo pointer argument. */
13246 arg_quo = build_fold_indirect_ref (arg_quo);
13247 /* Proceed iff a valid pointer type was passed in. */
13248 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13250 /* Set the value. */
13251 tree result_quo = fold_build2 (MODIFY_EXPR,
13252 TREE_TYPE (arg_quo), arg_quo,
13253 build_int_cst (NULL, integer_quo));
13254 TREE_SIDE_EFFECTS (result_quo) = 1;
13255 /* Combine the quo assignment with the rem. */
13256 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13257 result_quo, result_rem));
13265 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13266 resulting value as a tree with type TYPE. The mpfr precision is
13267 set to the precision of TYPE. We assume that this mpfr function
13268 returns zero if the result could be calculated exactly within the
13269 requested precision. In addition, the integer pointer represented
13270 by ARG_SG will be dereferenced and set to the appropriate signgam
13274 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13276 tree result = NULL_TREE;
13280 /* To proceed, MPFR must exactly represent the target floating point
13281 format, which only happens when the target base equals two. Also
13282 verify ARG is a constant and that ARG_SG is an int pointer. */
13283 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13284 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13285 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13286 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13288 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13290 /* In addition to NaN and Inf, the argument cannot be zero or a
13291 negative integer. */
13292 if (real_isfinite (ra)
13293 && ra->cl != rvc_zero
13294 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13296 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13297 const int prec = fmt->p;
13298 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13303 mpfr_init2 (m, prec);
13304 mpfr_from_real (m, ra, GMP_RNDN);
13305 mpfr_clear_flags ();
13306 inexact = mpfr_lgamma (m, &sg, m, rnd);
13307 result_lg = do_mpfr_ckconv (m, type, inexact);
13313 /* Dereference the arg_sg pointer argument. */
13314 arg_sg = build_fold_indirect_ref (arg_sg);
13315 /* Assign the signgam value into *arg_sg. */
13316 result_sg = fold_build2 (MODIFY_EXPR,
13317 TREE_TYPE (arg_sg), arg_sg,
13318 build_int_cst (NULL, sg));
13319 TREE_SIDE_EFFECTS (result_sg) = 1;
13320 /* Combine the signgam assignment with the lgamma result. */
13321 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13322 result_sg, result_lg));
13330 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13331 function FUNC on it and return the resulting value as a tree with
13332 type TYPE. The mpfr precision is set to the precision of TYPE. We
13333 assume that function FUNC returns zero if the result could be
13334 calculated exactly within the requested precision. */
13337 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13339 tree result = NULL_TREE;
13343 /* To proceed, MPFR must exactly represent the target floating point
13344 format, which only happens when the target base equals two. */
13345 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13346 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13347 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13349 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13350 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13352 if (real_isfinite (re) && real_isfinite (im))
13354 const struct real_format *const fmt =
13355 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13356 const int prec = fmt->p;
13357 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13358 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13362 mpc_init2 (m, prec);
13363 mpfr_from_real (mpc_realref(m), re, rnd);
13364 mpfr_from_real (mpc_imagref(m), im, rnd);
13365 mpfr_clear_flags ();
13366 inexact = func (m, m, crnd);
13367 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13375 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13376 mpc function FUNC on it and return the resulting value as a tree
13377 with type TYPE. The mpfr precision is set to the precision of
13378 TYPE. We assume that function FUNC returns zero if the result
13379 could be calculated exactly within the requested precision. If
13380 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13381 in the arguments and/or results. */
13384 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13385 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13387 tree result = NULL_TREE;
13392 /* To proceed, MPFR must exactly represent the target floating point
13393 format, which only happens when the target base equals two. */
13394 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13395 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13396 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13397 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13398 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13400 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13401 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13402 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13403 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13406 || (real_isfinite (re0) && real_isfinite (im0)
13407 && real_isfinite (re1) && real_isfinite (im1)))
13409 const struct real_format *const fmt =
13410 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13411 const int prec = fmt->p;
13412 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13413 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13417 mpc_init2 (m0, prec);
13418 mpc_init2 (m1, prec);
13419 mpfr_from_real (mpc_realref(m0), re0, rnd);
13420 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13421 mpfr_from_real (mpc_realref(m1), re1, rnd);
13422 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13423 mpfr_clear_flags ();
13424 inexact = func (m0, m0, m1, crnd);
13425 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13435 The functions below provide an alternate interface for folding
13436 builtin function calls presented as GIMPLE_CALL statements rather
13437 than as CALL_EXPRs. The folded result is still expressed as a
13438 tree. There is too much code duplication in the handling of
13439 varargs functions, and a more intrusive re-factoring would permit
13440 better sharing of code between the tree and statement-based
13441 versions of these functions. */
13443 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13444 along with N new arguments specified as the "..." parameters. SKIP
13445 is the number of arguments in STMT to be omitted. This function is used
13446 to do varargs-to-varargs transformations. */
13449 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13451 int oldnargs = gimple_call_num_args (stmt);
13452 int nargs = oldnargs - skip + n;
13453 tree fntype = TREE_TYPE (fndecl);
13454 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13458 location_t loc = gimple_location (stmt);
13460 buffer = XALLOCAVEC (tree, nargs);
13462 for (i = 0; i < n; i++)
13463 buffer[i] = va_arg (ap, tree);
13465 for (j = skip; j < oldnargs; j++, i++)
13466 buffer[i] = gimple_call_arg (stmt, j);
13468 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13471 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13472 a normal call should be emitted rather than expanding the function
13473 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13476 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13478 tree dest, size, len, fn, fmt, flag;
13479 const char *fmt_str;
13480 int nargs = gimple_call_num_args (stmt);
13482 /* Verify the required arguments in the original call. */
13485 dest = gimple_call_arg (stmt, 0);
13486 if (!validate_arg (dest, POINTER_TYPE))
13488 flag = gimple_call_arg (stmt, 1);
13489 if (!validate_arg (flag, INTEGER_TYPE))
13491 size = gimple_call_arg (stmt, 2);
13492 if (!validate_arg (size, INTEGER_TYPE))
13494 fmt = gimple_call_arg (stmt, 3);
13495 if (!validate_arg (fmt, POINTER_TYPE))
13498 if (! host_integerp (size, 1))
13503 if (!init_target_chars ())
13506 /* Check whether the format is a literal string constant. */
13507 fmt_str = c_getstr (fmt);
13508 if (fmt_str != NULL)
13510 /* If the format doesn't contain % args or %%, we know the size. */
13511 if (strchr (fmt_str, target_percent) == 0)
13513 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13514 len = build_int_cstu (size_type_node, strlen (fmt_str));
13516 /* If the format is "%s" and first ... argument is a string literal,
13517 we know the size too. */
13518 else if (fcode == BUILT_IN_SPRINTF_CHK
13519 && strcmp (fmt_str, target_percent_s) == 0)
13525 arg = gimple_call_arg (stmt, 4);
13526 if (validate_arg (arg, POINTER_TYPE))
13528 len = c_strlen (arg, 1);
13529 if (! len || ! host_integerp (len, 1))
13536 if (! integer_all_onesp (size))
13538 if (! len || ! tree_int_cst_lt (len, size))
13542 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13543 or if format doesn't contain % chars or is "%s". */
13544 if (! integer_zerop (flag))
13546 if (fmt_str == NULL)
13548 if (strchr (fmt_str, target_percent) != NULL
13549 && strcmp (fmt_str, target_percent_s))
13553 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13554 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13555 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13559 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13562 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13563 a normal call should be emitted rather than expanding the function
13564 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13565 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13566 passed as second argument. */
13569 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13570 enum built_in_function fcode)
13572 tree dest, size, len, fn, fmt, flag;
13573 const char *fmt_str;
13575 /* Verify the required arguments in the original call. */
13576 if (gimple_call_num_args (stmt) < 5)
13578 dest = gimple_call_arg (stmt, 0);
13579 if (!validate_arg (dest, POINTER_TYPE))
13581 len = gimple_call_arg (stmt, 1);
13582 if (!validate_arg (len, INTEGER_TYPE))
13584 flag = gimple_call_arg (stmt, 2);
13585 if (!validate_arg (flag, INTEGER_TYPE))
13587 size = gimple_call_arg (stmt, 3);
13588 if (!validate_arg (size, INTEGER_TYPE))
13590 fmt = gimple_call_arg (stmt, 4);
13591 if (!validate_arg (fmt, POINTER_TYPE))
13594 if (! host_integerp (size, 1))
13597 if (! integer_all_onesp (size))
13599 if (! host_integerp (len, 1))
13601 /* If LEN is not constant, try MAXLEN too.
13602 For MAXLEN only allow optimizing into non-_ocs function
13603 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13604 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13610 if (tree_int_cst_lt (size, maxlen))
13614 if (!init_target_chars ())
13617 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13618 or if format doesn't contain % chars or is "%s". */
13619 if (! integer_zerop (flag))
13621 fmt_str = c_getstr (fmt);
13622 if (fmt_str == NULL)
13624 if (strchr (fmt_str, target_percent) != NULL
13625 && strcmp (fmt_str, target_percent_s))
13629 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13631 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13632 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13636 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13639 /* Builtins with folding operations that operate on "..." arguments
13640 need special handling; we need to store the arguments in a convenient
13641 data structure before attempting any folding. Fortunately there are
13642 only a few builtins that fall into this category. FNDECL is the
13643 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13644 result of the function call is ignored. */
13647 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13648 bool ignore ATTRIBUTE_UNUSED)
13650 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13651 tree ret = NULL_TREE;
13655 case BUILT_IN_SPRINTF_CHK:
13656 case BUILT_IN_VSPRINTF_CHK:
13657 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13660 case BUILT_IN_SNPRINTF_CHK:
13661 case BUILT_IN_VSNPRINTF_CHK:
13662 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13669 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13670 TREE_NO_WARNING (ret) = 1;
13676 /* A wrapper function for builtin folding that prevents warnings for
13677 "statement without effect" and the like, caused by removing the
13678 call node earlier than the warning is generated. */
13681 fold_call_stmt (gimple stmt, bool ignore)
13683 tree ret = NULL_TREE;
13684 tree fndecl = gimple_call_fndecl (stmt);
13685 location_t loc = gimple_location (stmt);
13687 && TREE_CODE (fndecl) == FUNCTION_DECL
13688 && DECL_BUILT_IN (fndecl)
13689 && !gimple_call_va_arg_pack_p (stmt))
13691 int nargs = gimple_call_num_args (stmt);
13693 if (avoid_folding_inline_builtin (fndecl))
13695 /* FIXME: Don't use a list in this interface. */
13696 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13698 tree arglist = NULL_TREE;
13700 for (i = nargs - 1; i >= 0; i--)
13701 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13702 return targetm.fold_builtin (fndecl, arglist, ignore);
13706 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13708 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13710 for (i = 0; i < nargs; i++)
13711 args[i] = gimple_call_arg (stmt, i);
13712 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13715 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13718 /* Propagate location information from original call to
13719 expansion of builtin. Otherwise things like
13720 maybe_emit_chk_warning, that operate on the expansion
13721 of a builtin, will use the wrong location information. */
13722 if (gimple_has_location (stmt))
13724 tree realret = ret;
13725 if (TREE_CODE (ret) == NOP_EXPR)
13726 realret = TREE_OPERAND (ret, 0);
13727 if (CAN_HAVE_LOCATION_P (realret)
13728 && !EXPR_HAS_LOCATION (realret))
13729 SET_EXPR_LOCATION (realret, loc);
13739 /* Look up the function in built_in_decls that corresponds to DECL
13740 and set ASMSPEC as its user assembler name. DECL must be a
13741 function decl that declares a builtin. */
13744 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13747 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13748 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13751 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13752 set_user_assembler_name (builtin, asmspec);
13753 switch (DECL_FUNCTION_CODE (decl))
13755 case BUILT_IN_MEMCPY:
13756 init_block_move_fn (asmspec);
13757 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13759 case BUILT_IN_MEMSET:
13760 init_block_clear_fn (asmspec);
13761 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13763 case BUILT_IN_MEMMOVE:
13764 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13766 case BUILT_IN_MEMCMP:
13767 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13769 case BUILT_IN_ABORT:
13770 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13773 if (INT_TYPE_SIZE < BITS_PER_WORD)
13775 set_user_assembler_libfunc ("ffs", asmspec);
13776 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13777 MODE_INT, 0), "ffs");