1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
41 #include "typeclass.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic.h"
53 #ifndef SLOW_UNALIGNED_ACCESS
54 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
57 #ifndef PAD_VARARGS_DOWN
58 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
62 /* Define the names of the builtin function types and codes. */
63 const char *const built_in_class_names[4]
64 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
67 const char * built_in_names[(int) END_BUILTINS] =
69 #include "builtins.def"
73 /* Setup an array of _DECL trees, make sure each element is
74 initialized to NULL_TREE. */
75 tree built_in_decls[(int) END_BUILTINS];
76 /* Declarations used when constructing the builtin implicitly in the compiler.
77 It may be NULL_TREE when this is invalid (for instance runtime is not
78 required to implement the function call in all cases). */
79 tree implicit_built_in_decls[(int) END_BUILTINS];
81 static const char *c_getstr (tree);
82 static rtx c_readstr (const char *, enum machine_mode);
83 static int target_char_cast (tree, char *);
84 static rtx get_memory_rtx (tree, tree);
85 static int apply_args_size (void);
86 static int apply_result_size (void);
87 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
88 static rtx result_vector (int, rtx);
90 static void expand_builtin_update_setjmp_buf (rtx);
91 static void expand_builtin_prefetch (tree);
92 static rtx expand_builtin_apply_args (void);
93 static rtx expand_builtin_apply_args_1 (void);
94 static rtx expand_builtin_apply (rtx, rtx, rtx);
95 static void expand_builtin_return (rtx);
96 static enum type_class type_to_class (tree);
97 static rtx expand_builtin_classify_type (tree);
98 static void expand_errno_check (tree, rtx);
99 static rtx expand_builtin_mathfn (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
102 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_sincos (tree);
104 static rtx expand_builtin_cexpi (tree, rtx, rtx);
105 static rtx expand_builtin_int_roundingfn (tree, rtx);
106 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
107 static rtx expand_builtin_args_info (tree);
108 static rtx expand_builtin_next_arg (void);
109 static rtx expand_builtin_va_start (tree);
110 static rtx expand_builtin_va_end (tree);
111 static rtx expand_builtin_va_copy (tree);
112 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
113 static rtx expand_builtin_strcmp (tree, rtx);
114 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
115 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
116 static rtx expand_builtin_memcpy (tree, rtx);
117 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
119 enum machine_mode, int);
120 static rtx expand_builtin_strcpy (tree, rtx);
121 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
122 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_strncpy (tree, rtx);
124 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
125 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
127 static rtx expand_builtin_bzero (tree);
128 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_alloca (tree, rtx);
130 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
131 static rtx expand_builtin_frame_address (tree, tree);
132 static tree stabilize_va_list_loc (location_t, tree, int);
133 static rtx expand_builtin_expect (tree, rtx);
134 static tree fold_builtin_constant_p (tree);
135 static tree fold_builtin_expect (location_t, tree, tree);
136 static tree fold_builtin_classify_type (tree);
137 static tree fold_builtin_strlen (location_t, tree, tree);
138 static tree fold_builtin_inf (location_t, tree, int);
139 static tree fold_builtin_nan (tree, tree, int);
140 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
141 static bool validate_arg (const_tree, enum tree_code code);
142 static bool integer_valued_real_p (tree);
143 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
144 static bool readonly_data_expr (tree);
145 static rtx expand_builtin_fabs (tree, rtx, rtx);
146 static rtx expand_builtin_signbit (tree, rtx);
147 static tree fold_builtin_sqrt (location_t, tree, tree);
148 static tree fold_builtin_cbrt (location_t, tree, tree);
149 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
150 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_cos (location_t, tree, tree, tree);
152 static tree fold_builtin_cosh (location_t, tree, tree, tree);
153 static tree fold_builtin_tan (tree, tree);
154 static tree fold_builtin_trunc (location_t, tree, tree);
155 static tree fold_builtin_floor (location_t, tree, tree);
156 static tree fold_builtin_ceil (location_t, tree, tree);
157 static tree fold_builtin_round (location_t, tree, tree);
158 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
159 static tree fold_builtin_bitop (tree, tree);
160 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
161 static tree fold_builtin_strchr (location_t, tree, tree, tree);
162 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
163 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
164 static tree fold_builtin_strcmp (location_t, tree, tree);
165 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
166 static tree fold_builtin_signbit (location_t, tree, tree);
167 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
168 static tree fold_builtin_isascii (location_t, tree);
169 static tree fold_builtin_toascii (location_t, tree);
170 static tree fold_builtin_isdigit (location_t, tree);
171 static tree fold_builtin_fabs (location_t, tree, tree);
172 static tree fold_builtin_abs (location_t, tree, tree);
173 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
175 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
176 static tree fold_builtin_0 (location_t, tree, bool);
177 static tree fold_builtin_1 (location_t, tree, tree, bool);
178 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
179 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
180 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
181 static tree fold_builtin_varargs (location_t, tree, tree, bool);
183 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
184 static tree fold_builtin_strstr (location_t, tree, tree, tree);
185 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
186 static tree fold_builtin_strcat (location_t, tree, tree);
187 static tree fold_builtin_strncat (location_t, tree, tree, tree);
188 static tree fold_builtin_strspn (location_t, tree, tree);
189 static tree fold_builtin_strcspn (location_t, tree, tree);
190 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
192 static rtx expand_builtin_object_size (tree);
193 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
194 enum built_in_function);
195 static void maybe_emit_chk_warning (tree, enum built_in_function);
196 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_free_warning (tree);
198 static tree fold_builtin_object_size (tree, tree);
199 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
200 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
201 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
202 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
203 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
204 enum built_in_function);
205 static bool init_target_chars (void);
207 static unsigned HOST_WIDE_INT target_newline;
208 static unsigned HOST_WIDE_INT target_percent;
209 static unsigned HOST_WIDE_INT target_c;
210 static unsigned HOST_WIDE_INT target_s;
211 static char target_percent_c[3];
212 static char target_percent_s[3];
213 static char target_percent_s_newline[4];
214 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
215 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
216 static tree do_mpfr_arg2 (tree, tree, tree,
217 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
218 static tree do_mpfr_arg3 (tree, tree, tree, tree,
219 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
220 static tree do_mpfr_sincos (tree, tree, tree);
221 static tree do_mpfr_bessel_n (tree, tree, tree,
222 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
223 const REAL_VALUE_TYPE *, bool);
224 static tree do_mpfr_remquo (tree, tree, tree);
225 static tree do_mpfr_lgamma_r (tree, tree, tree);
227 /* Return true if NAME starts with __builtin_ or __sync_. */
230 is_builtin_name (const char *name)
232 if (strncmp (name, "__builtin_", 10) == 0)
234 if (strncmp (name, "__sync_", 7) == 0)
240 /* Return true if DECL is a function symbol representing a built-in. */
243 is_builtin_fn (tree decl)
245 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
249 /* Return true if NODE should be considered for inline expansion regardless
250 of the optimization level. This means whenever a function is invoked with
251 its "internal" name, which normally contains the prefix "__builtin". */
254 called_as_built_in (tree node)
256 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
257 we want the name used to call the function, not the name it
259 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
260 return is_builtin_name (name);
263 /* Return the alignment in bits of EXP, an object.
264 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
265 guessed alignment e.g. from type alignment. */
268 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
273 if (handled_component_p (exp))
275 HOST_WIDE_INT bitsize, bitpos;
277 enum machine_mode mode;
278 int unsignedp, volatilep;
280 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
281 &mode, &unsignedp, &volatilep, true);
283 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
288 if (TREE_CODE (offset) == PLUS_EXPR)
290 next_offset = TREE_OPERAND (offset, 0);
291 offset = TREE_OPERAND (offset, 1);
295 if (host_integerp (offset, 1))
297 /* Any overflow in calculating offset_bits won't change
300 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
303 inner = MIN (inner, (offset_bits & -offset_bits));
305 else if (TREE_CODE (offset) == MULT_EXPR
306 && host_integerp (TREE_OPERAND (offset, 1), 1))
308 /* Any overflow in calculating offset_factor won't change
310 unsigned offset_factor
311 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
315 inner = MIN (inner, (offset_factor & -offset_factor));
319 inner = MIN (inner, BITS_PER_UNIT);
322 offset = next_offset;
325 if (TREE_CODE (exp) == CONST_DECL)
326 exp = DECL_INITIAL (exp);
328 && TREE_CODE (exp) != LABEL_DECL)
329 align = MIN (inner, DECL_ALIGN (exp));
330 #ifdef CONSTANT_ALIGNMENT
331 else if (CONSTANT_CLASS_P (exp))
332 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
334 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
335 || TREE_CODE (exp) == INDIRECT_REF)
336 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
338 align = MIN (align, inner);
339 return MIN (align, max_align);
342 /* Returns true iff we can trust that alignment information has been
343 calculated properly. */
346 can_trust_pointer_alignment (void)
348 /* We rely on TER to compute accurate alignment information. */
349 return (optimize && flag_tree_ter);
352 /* Return the alignment in bits of EXP, a pointer valued expression.
353 But don't return more than MAX_ALIGN no matter what.
354 The alignment returned is, by default, the alignment of the thing that
355 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
357 Otherwise, look at the expression to see if we can do better, i.e., if the
358 expression is actually pointing at an object whose alignment is tighter. */
361 get_pointer_alignment (tree exp, unsigned int max_align)
363 unsigned int align, inner;
365 if (!can_trust_pointer_alignment ())
368 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
371 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
372 align = MIN (align, max_align);
376 switch (TREE_CODE (exp))
379 exp = TREE_OPERAND (exp, 0);
380 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
383 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
384 align = MIN (inner, max_align);
387 case POINTER_PLUS_EXPR:
388 /* If sum of pointer + int, restrict our maximum alignment to that
389 imposed by the integer. If not, we can't do any better than
391 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
394 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
395 & (max_align / BITS_PER_UNIT - 1))
399 exp = TREE_OPERAND (exp, 0);
403 /* See what we are pointing at and look at its alignment. */
404 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
412 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
413 way, because it could contain a zero byte in the middle.
414 TREE_STRING_LENGTH is the size of the character array, not the string.
416 ONLY_VALUE should be nonzero if the result is not going to be emitted
417 into the instruction stream and zero if it is going to be expanded.
418 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
419 is returned, otherwise NULL, since
420 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
421 evaluate the side-effects.
423 The value returned is of type `ssizetype'.
425 Unfortunately, string_constant can't access the values of const char
426 arrays with initializers, so neither can we do so here. */
429 c_strlen (tree src, int only_value)
432 HOST_WIDE_INT offset;
438 if (TREE_CODE (src) == COND_EXPR
439 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
443 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
444 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
445 if (tree_int_cst_equal (len1, len2))
449 if (TREE_CODE (src) == COMPOUND_EXPR
450 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
451 return c_strlen (TREE_OPERAND (src, 1), only_value);
453 if (EXPR_HAS_LOCATION (src))
454 loc = EXPR_LOCATION (src);
456 loc = input_location;
458 src = string_constant (src, &offset_node);
462 max = TREE_STRING_LENGTH (src) - 1;
463 ptr = TREE_STRING_POINTER (src);
465 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
467 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
468 compute the offset to the following null if we don't know where to
469 start searching for it. */
472 for (i = 0; i < max; i++)
476 /* We don't know the starting offset, but we do know that the string
477 has no internal zero bytes. We can assume that the offset falls
478 within the bounds of the string; otherwise, the programmer deserves
479 what he gets. Subtract the offset from the length of the string,
480 and return that. This would perhaps not be valid if we were dealing
481 with named arrays in addition to literal string constants. */
483 return size_diffop_loc (loc, size_int (max), offset_node);
486 /* We have a known offset into the string. Start searching there for
487 a null character if we can represent it as a single HOST_WIDE_INT. */
488 if (offset_node == 0)
490 else if (! host_integerp (offset_node, 0))
493 offset = tree_low_cst (offset_node, 0);
495 /* If the offset is known to be out of bounds, warn, and call strlen at
497 if (offset < 0 || offset > max)
499 /* Suppress multiple warnings for propagated constant strings. */
500 if (! TREE_NO_WARNING (src))
502 warning_at (loc, 0, "offset outside bounds of constant string");
503 TREE_NO_WARNING (src) = 1;
508 /* Use strlen to search for the first zero byte. Since any strings
509 constructed with build_string will have nulls appended, we win even
510 if we get handed something like (char[4])"abcd".
512 Since OFFSET is our starting index into the string, no further
513 calculation is needed. */
514 return ssize_int (strlen (ptr + offset));
517 /* Return a char pointer for a C string if it is a string constant
518 or sum of string constant and integer constant. */
525 src = string_constant (src, &offset_node);
529 if (offset_node == 0)
530 return TREE_STRING_POINTER (src);
531 else if (!host_integerp (offset_node, 1)
532 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
535 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
538 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
539 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
542 c_readstr (const char *str, enum machine_mode mode)
548 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
553 for (i = 0; i < GET_MODE_SIZE (mode); i++)
556 if (WORDS_BIG_ENDIAN)
557 j = GET_MODE_SIZE (mode) - i - 1;
558 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
559 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
560 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
562 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
565 ch = (unsigned char) str[i];
566 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
568 return immed_double_const (c[0], c[1], mode);
571 /* Cast a target constant CST to target CHAR and if that value fits into
572 host char type, return zero and put that value into variable pointed to by
576 target_char_cast (tree cst, char *p)
578 unsigned HOST_WIDE_INT val, hostval;
580 if (!host_integerp (cst, 1)
581 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
584 val = tree_low_cst (cst, 1);
585 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
586 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
589 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
590 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
599 /* Similar to save_expr, but assumes that arbitrary code is not executed
600 in between the multiple evaluations. In particular, we assume that a
601 non-addressable local variable will not be modified. */
604 builtin_save_expr (tree exp)
606 if (TREE_ADDRESSABLE (exp) == 0
607 && (TREE_CODE (exp) == PARM_DECL
608 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
611 return save_expr (exp);
614 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
615 times to get the address of either a higher stack frame, or a return
616 address located within it (depending on FNDECL_CODE). */
619 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
623 #ifdef INITIAL_FRAME_ADDRESS_RTX
624 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
628 /* For a zero count with __builtin_return_address, we don't care what
629 frame address we return, because target-specific definitions will
630 override us. Therefore frame pointer elimination is OK, and using
631 the soft frame pointer is OK.
633 For a nonzero count, or a zero count with __builtin_frame_address,
634 we require a stable offset from the current frame pointer to the
635 previous one, so we must use the hard frame pointer, and
636 we must disable frame pointer elimination. */
637 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
638 tem = frame_pointer_rtx;
641 tem = hard_frame_pointer_rtx;
643 /* Tell reload not to eliminate the frame pointer. */
644 crtl->accesses_prior_frames = 1;
648 /* Some machines need special handling before we can access
649 arbitrary frames. For example, on the SPARC, we must first flush
650 all register windows to the stack. */
651 #ifdef SETUP_FRAME_ADDRESSES
653 SETUP_FRAME_ADDRESSES ();
656 /* On the SPARC, the return address is not in the frame, it is in a
657 register. There is no way to access it off of the current frame
658 pointer, but it can be accessed off the previous frame pointer by
659 reading the value from the register window save area. */
660 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
661 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
665 /* Scan back COUNT frames to the specified frame. */
666 for (i = 0; i < count; i++)
668 /* Assume the dynamic chain pointer is in the word that the
669 frame address points to, unless otherwise specified. */
670 #ifdef DYNAMIC_CHAIN_ADDRESS
671 tem = DYNAMIC_CHAIN_ADDRESS (tem);
673 tem = memory_address (Pmode, tem);
674 tem = gen_frame_mem (Pmode, tem);
675 tem = copy_to_reg (tem);
678 /* For __builtin_frame_address, return what we've got. But, on
679 the SPARC for example, we may have to add a bias. */
680 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
681 #ifdef FRAME_ADDR_RTX
682 return FRAME_ADDR_RTX (tem);
687 /* For __builtin_return_address, get the return address from that frame. */
688 #ifdef RETURN_ADDR_RTX
689 tem = RETURN_ADDR_RTX (count, tem);
691 tem = memory_address (Pmode,
692 plus_constant (tem, GET_MODE_SIZE (Pmode)));
693 tem = gen_frame_mem (Pmode, tem);
698 /* Alias set used for setjmp buffer. */
699 static alias_set_type setjmp_alias_set = -1;
701 /* Construct the leading half of a __builtin_setjmp call. Control will
702 return to RECEIVER_LABEL. This is also called directly by the SJLJ
703 exception handling code. */
706 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
708 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
712 if (setjmp_alias_set == -1)
713 setjmp_alias_set = new_alias_set ();
715 buf_addr = convert_memory_address (Pmode, buf_addr);
717 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
719 /* We store the frame pointer and the address of receiver_label in
720 the buffer and use the rest of it for the stack save area, which
721 is machine-dependent. */
723 mem = gen_rtx_MEM (Pmode, buf_addr);
724 set_mem_alias_set (mem, setjmp_alias_set);
725 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
727 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
728 set_mem_alias_set (mem, setjmp_alias_set);
730 emit_move_insn (validize_mem (mem),
731 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
733 stack_save = gen_rtx_MEM (sa_mode,
734 plus_constant (buf_addr,
735 2 * GET_MODE_SIZE (Pmode)));
736 set_mem_alias_set (stack_save, setjmp_alias_set);
737 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
739 /* If there is further processing to do, do it. */
740 #ifdef HAVE_builtin_setjmp_setup
741 if (HAVE_builtin_setjmp_setup)
742 emit_insn (gen_builtin_setjmp_setup (buf_addr));
745 /* Tell optimize_save_area_alloca that extra work is going to
746 need to go on during alloca. */
747 cfun->calls_setjmp = 1;
749 /* We have a nonlocal label. */
750 cfun->has_nonlocal_label = 1;
753 /* Construct the trailing part of a __builtin_setjmp call. This is
754 also called directly by the SJLJ exception handling code. */
757 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
761 /* Clobber the FP when we get here, so we have to make sure it's
762 marked as used by this function. */
763 emit_use (hard_frame_pointer_rtx);
765 /* Mark the static chain as clobbered here so life information
766 doesn't get messed up for it. */
767 chain = targetm.calls.static_chain (current_function_decl, true);
768 if (chain && REG_P (chain))
769 emit_clobber (chain);
771 /* Now put in the code to restore the frame pointer, and argument
772 pointer, if needed. */
773 #ifdef HAVE_nonlocal_goto
774 if (! HAVE_nonlocal_goto)
777 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
778 /* This might change the hard frame pointer in ways that aren't
779 apparent to early optimization passes, so force a clobber. */
780 emit_clobber (hard_frame_pointer_rtx);
783 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
784 if (fixed_regs[ARG_POINTER_REGNUM])
786 #ifdef ELIMINABLE_REGS
788 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
790 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
791 if (elim_regs[i].from == ARG_POINTER_REGNUM
792 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
795 if (i == ARRAY_SIZE (elim_regs))
798 /* Now restore our arg pointer from the address at which it
799 was saved in our stack frame. */
800 emit_move_insn (crtl->args.internal_arg_pointer,
801 copy_to_reg (get_arg_pointer_save_area ()));
806 #ifdef HAVE_builtin_setjmp_receiver
807 if (HAVE_builtin_setjmp_receiver)
808 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
811 #ifdef HAVE_nonlocal_goto_receiver
812 if (HAVE_nonlocal_goto_receiver)
813 emit_insn (gen_nonlocal_goto_receiver ());
818 /* We must not allow the code we just generated to be reordered by
819 scheduling. Specifically, the update of the frame pointer must
820 happen immediately, not later. */
821 emit_insn (gen_blockage ());
824 /* __builtin_longjmp is passed a pointer to an array of five words (not
825 all will be used on all machines). It operates similarly to the C
826 library function of the same name, but is more efficient. Much of
827 the code below is copied from the handling of non-local gotos. */
830 expand_builtin_longjmp (rtx buf_addr, rtx value)
832 rtx fp, lab, stack, insn, last;
833 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
835 /* DRAP is needed for stack realign if longjmp is expanded to current
837 if (SUPPORTS_STACK_ALIGNMENT)
838 crtl->need_drap = true;
840 if (setjmp_alias_set == -1)
841 setjmp_alias_set = new_alias_set ();
843 buf_addr = convert_memory_address (Pmode, buf_addr);
845 buf_addr = force_reg (Pmode, buf_addr);
847 /* We require that the user must pass a second argument of 1, because
848 that is what builtin_setjmp will return. */
849 gcc_assert (value == const1_rtx);
851 last = get_last_insn ();
852 #ifdef HAVE_builtin_longjmp
853 if (HAVE_builtin_longjmp)
854 emit_insn (gen_builtin_longjmp (buf_addr));
858 fp = gen_rtx_MEM (Pmode, buf_addr);
859 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
860 GET_MODE_SIZE (Pmode)));
862 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
863 2 * GET_MODE_SIZE (Pmode)));
864 set_mem_alias_set (fp, setjmp_alias_set);
865 set_mem_alias_set (lab, setjmp_alias_set);
866 set_mem_alias_set (stack, setjmp_alias_set);
868 /* Pick up FP, label, and SP from the block and jump. This code is
869 from expand_goto in stmt.c; see there for detailed comments. */
870 #ifdef HAVE_nonlocal_goto
871 if (HAVE_nonlocal_goto)
872 /* We have to pass a value to the nonlocal_goto pattern that will
873 get copied into the static_chain pointer, but it does not matter
874 what that value is, because builtin_setjmp does not use it. */
875 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
879 lab = copy_to_reg (lab);
881 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
882 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
884 emit_move_insn (hard_frame_pointer_rtx, fp);
885 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
887 emit_use (hard_frame_pointer_rtx);
888 emit_use (stack_pointer_rtx);
889 emit_indirect_jump (lab);
893 /* Search backwards and mark the jump insn as a non-local goto.
894 Note that this precludes the use of __builtin_longjmp to a
895 __builtin_setjmp target in the same function. However, we've
896 already cautioned the user that these functions are for
897 internal exception handling use only. */
898 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
900 gcc_assert (insn != last);
904 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
907 else if (CALL_P (insn))
912 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
913 and the address of the save area. */
916 expand_builtin_nonlocal_goto (tree exp)
918 tree t_label, t_save_area;
919 rtx r_label, r_save_area, r_fp, r_sp, insn;
921 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
924 t_label = CALL_EXPR_ARG (exp, 0);
925 t_save_area = CALL_EXPR_ARG (exp, 1);
927 r_label = expand_normal (t_label);
928 r_label = convert_memory_address (Pmode, r_label);
929 r_save_area = expand_normal (t_save_area);
930 r_save_area = convert_memory_address (Pmode, r_save_area);
931 /* Copy the address of the save location to a register just in case it was based
932 on the frame pointer. */
933 r_save_area = copy_to_reg (r_save_area);
934 r_fp = gen_rtx_MEM (Pmode, r_save_area);
935 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
936 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
938 crtl->has_nonlocal_goto = 1;
940 #ifdef HAVE_nonlocal_goto
941 /* ??? We no longer need to pass the static chain value, afaik. */
942 if (HAVE_nonlocal_goto)
943 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
947 r_label = copy_to_reg (r_label);
949 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
950 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
952 /* Restore frame pointer for containing function.
953 This sets the actual hard register used for the frame pointer
954 to the location of the function's incoming static chain info.
955 The non-local goto handler will then adjust it to contain the
956 proper value and reload the argument pointer, if needed. */
957 emit_move_insn (hard_frame_pointer_rtx, r_fp);
958 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
960 /* USE of hard_frame_pointer_rtx added for consistency;
961 not clear if really needed. */
962 emit_use (hard_frame_pointer_rtx);
963 emit_use (stack_pointer_rtx);
965 /* If the architecture is using a GP register, we must
966 conservatively assume that the target function makes use of it.
967 The prologue of functions with nonlocal gotos must therefore
968 initialize the GP register to the appropriate value, and we
969 must then make sure that this value is live at the point
970 of the jump. (Note that this doesn't necessarily apply
971 to targets with a nonlocal_goto pattern; they are free
972 to implement it in their own way. Note also that this is
973 a no-op if the GP register is a global invariant.) */
974 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
975 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
976 emit_use (pic_offset_table_rtx);
978 emit_indirect_jump (r_label);
981 /* Search backwards to the jump insn and mark it as a
983 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
987 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
990 else if (CALL_P (insn))
997 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
998 (not all will be used on all machines) that was passed to __builtin_setjmp.
999 It updates the stack pointer in that block to correspond to the current
1003 expand_builtin_update_setjmp_buf (rtx buf_addr)
1005 enum machine_mode sa_mode = Pmode;
1009 #ifdef HAVE_save_stack_nonlocal
1010 if (HAVE_save_stack_nonlocal)
1011 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1013 #ifdef STACK_SAVEAREA_MODE
1014 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1018 = gen_rtx_MEM (sa_mode,
1021 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1025 emit_insn (gen_setjmp ());
1028 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1031 /* Expand a call to __builtin_prefetch. For a target that does not support
1032 data prefetch, evaluate the memory address argument in case it has side
1036 expand_builtin_prefetch (tree exp)
1038 tree arg0, arg1, arg2;
1042 if (!validate_arglist (exp, POINTER_TYPE, 0))
1045 arg0 = CALL_EXPR_ARG (exp, 0);
1047 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1048 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1050 nargs = call_expr_nargs (exp);
1052 arg1 = CALL_EXPR_ARG (exp, 1);
1054 arg1 = integer_zero_node;
1056 arg2 = CALL_EXPR_ARG (exp, 2);
1058 arg2 = build_int_cst (NULL_TREE, 3);
1060 /* Argument 0 is an address. */
1061 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1063 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1064 if (TREE_CODE (arg1) != INTEGER_CST)
1066 error ("second argument to %<__builtin_prefetch%> must be a constant");
1067 arg1 = integer_zero_node;
1069 op1 = expand_normal (arg1);
1070 /* Argument 1 must be either zero or one. */
1071 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1073 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1078 /* Argument 2 (locality) must be a compile-time constant int. */
1079 if (TREE_CODE (arg2) != INTEGER_CST)
1081 error ("third argument to %<__builtin_prefetch%> must be a constant");
1082 arg2 = integer_zero_node;
1084 op2 = expand_normal (arg2);
1085 /* Argument 2 must be 0, 1, 2, or 3. */
1086 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1088 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1092 #ifdef HAVE_prefetch
1095 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1097 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1098 || (GET_MODE (op0) != Pmode))
1100 op0 = convert_memory_address (Pmode, op0);
1101 op0 = force_reg (Pmode, op0);
1103 emit_insn (gen_prefetch (op0, op1, op2));
1107 /* Don't do anything with direct references to volatile memory, but
1108 generate code to handle other side effects. */
1109 if (!MEM_P (op0) && side_effects_p (op0))
1113 /* Get a MEM rtx for expression EXP which is the address of an operand
1114 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1115 the maximum length of the block of memory that might be accessed or
1119 get_memory_rtx (tree exp, tree len)
1121 tree orig_exp = exp;
1125 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1126 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1127 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1128 exp = TREE_OPERAND (exp, 0);
1130 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1131 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1133 /* Get an expression we can use to find the attributes to assign to MEM.
1134 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1135 we can. First remove any nops. */
1136 while (CONVERT_EXPR_P (exp)
1137 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1138 exp = TREE_OPERAND (exp, 0);
1141 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1142 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1143 && host_integerp (TREE_OPERAND (exp, 1), 0)
1144 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1145 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1146 else if (TREE_CODE (exp) == ADDR_EXPR)
1147 exp = TREE_OPERAND (exp, 0);
1148 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1149 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1153 /* Honor attributes derived from exp, except for the alias set
1154 (as builtin stringops may alias with anything) and the size
1155 (as stringops may access multiple array elements). */
1158 set_mem_attributes (mem, exp, 0);
1161 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1163 /* Allow the string and memory builtins to overflow from one
1164 field into another, see http://gcc.gnu.org/PR23561.
1165 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1166 memory accessed by the string or memory builtin will fit
1167 within the field. */
1168 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1170 tree mem_expr = MEM_EXPR (mem);
1171 HOST_WIDE_INT offset = -1, length = -1;
1174 while (TREE_CODE (inner) == ARRAY_REF
1175 || CONVERT_EXPR_P (inner)
1176 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1177 || TREE_CODE (inner) == SAVE_EXPR)
1178 inner = TREE_OPERAND (inner, 0);
1180 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1182 if (MEM_OFFSET (mem)
1183 && CONST_INT_P (MEM_OFFSET (mem)))
1184 offset = INTVAL (MEM_OFFSET (mem));
1186 if (offset >= 0 && len && host_integerp (len, 0))
1187 length = tree_low_cst (len, 0);
1189 while (TREE_CODE (inner) == COMPONENT_REF)
1191 tree field = TREE_OPERAND (inner, 1);
1192 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1193 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1195 /* Bitfields are generally not byte-addressable. */
1196 gcc_assert (!DECL_BIT_FIELD (field)
1197 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1198 % BITS_PER_UNIT) == 0
1199 && host_integerp (DECL_SIZE (field), 0)
1200 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1201 % BITS_PER_UNIT) == 0));
1203 /* If we can prove that the memory starting at XEXP (mem, 0) and
1204 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1205 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1206 fields without DECL_SIZE_UNIT like flexible array members. */
1208 && DECL_SIZE_UNIT (field)
1209 && host_integerp (DECL_SIZE_UNIT (field), 0))
1212 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1215 && offset + length <= size)
1220 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1221 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1222 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1230 mem_expr = TREE_OPERAND (mem_expr, 0);
1231 inner = TREE_OPERAND (inner, 0);
1234 if (mem_expr == NULL)
1236 if (mem_expr != MEM_EXPR (mem))
1238 set_mem_expr (mem, mem_expr);
1239 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1242 set_mem_alias_set (mem, 0);
1243 set_mem_size (mem, NULL_RTX);
1249 /* Built-in functions to perform an untyped call and return. */
1251 /* For each register that may be used for calling a function, this
1252 gives a mode used to copy the register's value. VOIDmode indicates
1253 the register is not used for calling a function. If the machine
1254 has register windows, this gives only the outbound registers.
1255 INCOMING_REGNO gives the corresponding inbound register. */
1256 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1258 /* For each register that may be used for returning values, this gives
1259 a mode used to copy the register's value. VOIDmode indicates the
1260 register is not used for returning values. If the machine has
1261 register windows, this gives only the outbound registers.
1262 INCOMING_REGNO gives the corresponding inbound register. */
1263 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1265 /* Return the size required for the block returned by __builtin_apply_args,
1266 and initialize apply_args_mode. */
1269 apply_args_size (void)
1271 static int size = -1;
1274 enum machine_mode mode;
1276 /* The values computed by this function never change. */
1279 /* The first value is the incoming arg-pointer. */
1280 size = GET_MODE_SIZE (Pmode);
1282 /* The second value is the structure value address unless this is
1283 passed as an "invisible" first argument. */
1284 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1285 size += GET_MODE_SIZE (Pmode);
1287 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1288 if (FUNCTION_ARG_REGNO_P (regno))
1290 mode = reg_raw_mode[regno];
1292 gcc_assert (mode != VOIDmode);
1294 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1295 if (size % align != 0)
1296 size = CEIL (size, align) * align;
1297 size += GET_MODE_SIZE (mode);
1298 apply_args_mode[regno] = mode;
1302 apply_args_mode[regno] = VOIDmode;
1308 /* Return the size required for the block returned by __builtin_apply,
1309 and initialize apply_result_mode. */
1312 apply_result_size (void)
1314 static int size = -1;
1316 enum machine_mode mode;
1318 /* The values computed by this function never change. */
1323 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1324 if (targetm.calls.function_value_regno_p (regno))
1326 mode = reg_raw_mode[regno];
1328 gcc_assert (mode != VOIDmode);
1330 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1331 if (size % align != 0)
1332 size = CEIL (size, align) * align;
1333 size += GET_MODE_SIZE (mode);
1334 apply_result_mode[regno] = mode;
1337 apply_result_mode[regno] = VOIDmode;
1339 /* Allow targets that use untyped_call and untyped_return to override
1340 the size so that machine-specific information can be stored here. */
1341 #ifdef APPLY_RESULT_SIZE
1342 size = APPLY_RESULT_SIZE;
1348 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1349 /* Create a vector describing the result block RESULT. If SAVEP is true,
1350 the result block is used to save the values; otherwise it is used to
1351 restore the values. */
1354 result_vector (int savep, rtx result)
1356 int regno, size, align, nelts;
1357 enum machine_mode mode;
1359 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1362 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1363 if ((mode = apply_result_mode[regno]) != VOIDmode)
1365 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1366 if (size % align != 0)
1367 size = CEIL (size, align) * align;
1368 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1369 mem = adjust_address (result, mode, size);
1370 savevec[nelts++] = (savep
1371 ? gen_rtx_SET (VOIDmode, mem, reg)
1372 : gen_rtx_SET (VOIDmode, reg, mem));
1373 size += GET_MODE_SIZE (mode);
1375 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1377 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1379 /* Save the state required to perform an untyped call with the same
1380 arguments as were passed to the current function. */
1383 expand_builtin_apply_args_1 (void)
1386 int size, align, regno;
1387 enum machine_mode mode;
1388 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1390 /* Create a block where the arg-pointer, structure value address,
1391 and argument registers can be saved. */
1392 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1394 /* Walk past the arg-pointer and structure value address. */
1395 size = GET_MODE_SIZE (Pmode);
1396 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1397 size += GET_MODE_SIZE (Pmode);
1399 /* Save each register used in calling a function to the block. */
1400 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1401 if ((mode = apply_args_mode[regno]) != VOIDmode)
1403 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1404 if (size % align != 0)
1405 size = CEIL (size, align) * align;
1407 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1409 emit_move_insn (adjust_address (registers, mode, size), tem);
1410 size += GET_MODE_SIZE (mode);
1413 /* Save the arg pointer to the block. */
1414 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1415 #ifdef STACK_GROWS_DOWNWARD
1416 /* We need the pointer as the caller actually passed them to us, not
1417 as we might have pretended they were passed. Make sure it's a valid
1418 operand, as emit_move_insn isn't expected to handle a PLUS. */
1420 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1423 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1425 size = GET_MODE_SIZE (Pmode);
1427 /* Save the structure value address unless this is passed as an
1428 "invisible" first argument. */
1429 if (struct_incoming_value)
1431 emit_move_insn (adjust_address (registers, Pmode, size),
1432 copy_to_reg (struct_incoming_value));
1433 size += GET_MODE_SIZE (Pmode);
1436 /* Return the address of the block. */
1437 return copy_addr_to_reg (XEXP (registers, 0));
1440 /* __builtin_apply_args returns block of memory allocated on
1441 the stack into which is stored the arg pointer, structure
1442 value address, static chain, and all the registers that might
1443 possibly be used in performing a function call. The code is
1444 moved to the start of the function so the incoming values are
1448 expand_builtin_apply_args (void)
1450 /* Don't do __builtin_apply_args more than once in a function.
1451 Save the result of the first call and reuse it. */
1452 if (apply_args_value != 0)
1453 return apply_args_value;
1455 /* When this function is called, it means that registers must be
1456 saved on entry to this function. So we migrate the
1457 call to the first insn of this function. */
1462 temp = expand_builtin_apply_args_1 ();
1466 apply_args_value = temp;
1468 /* Put the insns after the NOTE that starts the function.
1469 If this is inside a start_sequence, make the outer-level insn
1470 chain current, so the code is placed at the start of the
1471 function. If internal_arg_pointer is a non-virtual pseudo,
1472 it needs to be placed after the function that initializes
1474 push_topmost_sequence ();
1475 if (REG_P (crtl->args.internal_arg_pointer)
1476 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1477 emit_insn_before (seq, parm_birth_insn);
1479 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1480 pop_topmost_sequence ();
1485 /* Perform an untyped call and save the state required to perform an
1486 untyped return of whatever value was returned by the given function. */
1489 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1491 int size, align, regno;
1492 enum machine_mode mode;
1493 rtx incoming_args, result, reg, dest, src, call_insn;
1494 rtx old_stack_level = 0;
1495 rtx call_fusage = 0;
1496 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1498 arguments = convert_memory_address (Pmode, arguments);
1500 /* Create a block where the return registers can be saved. */
1501 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1503 /* Fetch the arg pointer from the ARGUMENTS block. */
1504 incoming_args = gen_reg_rtx (Pmode);
1505 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1506 #ifndef STACK_GROWS_DOWNWARD
1507 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1508 incoming_args, 0, OPTAB_LIB_WIDEN);
1511 /* Push a new argument block and copy the arguments. Do not allow
1512 the (potential) memcpy call below to interfere with our stack
1514 do_pending_stack_adjust ();
1517 /* Save the stack with nonlocal if available. */
1518 #ifdef HAVE_save_stack_nonlocal
1519 if (HAVE_save_stack_nonlocal)
1520 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1523 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1525 /* Allocate a block of memory onto the stack and copy the memory
1526 arguments to the outgoing arguments address. */
1527 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1529 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1530 may have already set current_function_calls_alloca to true.
1531 current_function_calls_alloca won't be set if argsize is zero,
1532 so we have to guarantee need_drap is true here. */
1533 if (SUPPORTS_STACK_ALIGNMENT)
1534 crtl->need_drap = true;
1536 dest = virtual_outgoing_args_rtx;
1537 #ifndef STACK_GROWS_DOWNWARD
1538 if (CONST_INT_P (argsize))
1539 dest = plus_constant (dest, -INTVAL (argsize));
1541 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1543 dest = gen_rtx_MEM (BLKmode, dest);
1544 set_mem_align (dest, PARM_BOUNDARY);
1545 src = gen_rtx_MEM (BLKmode, incoming_args);
1546 set_mem_align (src, PARM_BOUNDARY);
1547 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1549 /* Refer to the argument block. */
1551 arguments = gen_rtx_MEM (BLKmode, arguments);
1552 set_mem_align (arguments, PARM_BOUNDARY);
1554 /* Walk past the arg-pointer and structure value address. */
1555 size = GET_MODE_SIZE (Pmode);
1557 size += GET_MODE_SIZE (Pmode);
1559 /* Restore each of the registers previously saved. Make USE insns
1560 for each of these registers for use in making the call. */
1561 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1562 if ((mode = apply_args_mode[regno]) != VOIDmode)
1564 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1565 if (size % align != 0)
1566 size = CEIL (size, align) * align;
1567 reg = gen_rtx_REG (mode, regno);
1568 emit_move_insn (reg, adjust_address (arguments, mode, size));
1569 use_reg (&call_fusage, reg);
1570 size += GET_MODE_SIZE (mode);
1573 /* Restore the structure value address unless this is passed as an
1574 "invisible" first argument. */
1575 size = GET_MODE_SIZE (Pmode);
1578 rtx value = gen_reg_rtx (Pmode);
1579 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1580 emit_move_insn (struct_value, value);
1581 if (REG_P (struct_value))
1582 use_reg (&call_fusage, struct_value);
1583 size += GET_MODE_SIZE (Pmode);
1586 /* All arguments and registers used for the call are set up by now! */
1587 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1589 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1590 and we don't want to load it into a register as an optimization,
1591 because prepare_call_address already did it if it should be done. */
1592 if (GET_CODE (function) != SYMBOL_REF)
1593 function = memory_address (FUNCTION_MODE, function);
1595 /* Generate the actual call instruction and save the return value. */
1596 #ifdef HAVE_untyped_call
1597 if (HAVE_untyped_call)
1598 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1599 result, result_vector (1, result)));
1602 #ifdef HAVE_call_value
1603 if (HAVE_call_value)
1607 /* Locate the unique return register. It is not possible to
1608 express a call that sets more than one return register using
1609 call_value; use untyped_call for that. In fact, untyped_call
1610 only needs to save the return registers in the given block. */
1611 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1612 if ((mode = apply_result_mode[regno]) != VOIDmode)
1614 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1616 valreg = gen_rtx_REG (mode, regno);
1619 emit_call_insn (GEN_CALL_VALUE (valreg,
1620 gen_rtx_MEM (FUNCTION_MODE, function),
1621 const0_rtx, NULL_RTX, const0_rtx));
1623 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1629 /* Find the CALL insn we just emitted, and attach the register usage
1631 call_insn = last_call_insn ();
1632 add_function_usage_to (call_insn, call_fusage);
1634 /* Restore the stack. */
1635 #ifdef HAVE_save_stack_nonlocal
1636 if (HAVE_save_stack_nonlocal)
1637 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1640 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1644 /* Return the address of the result block. */
1645 result = copy_addr_to_reg (XEXP (result, 0));
1646 return convert_memory_address (ptr_mode, result);
1649 /* Perform an untyped return. */
1652 expand_builtin_return (rtx result)
1654 int size, align, regno;
1655 enum machine_mode mode;
1657 rtx call_fusage = 0;
1659 result = convert_memory_address (Pmode, result);
1661 apply_result_size ();
1662 result = gen_rtx_MEM (BLKmode, result);
1664 #ifdef HAVE_untyped_return
1665 if (HAVE_untyped_return)
1667 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1673 /* Restore the return value and note that each value is used. */
1675 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1676 if ((mode = apply_result_mode[regno]) != VOIDmode)
1678 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1679 if (size % align != 0)
1680 size = CEIL (size, align) * align;
1681 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1682 emit_move_insn (reg, adjust_address (result, mode, size));
1684 push_to_sequence (call_fusage);
1686 call_fusage = get_insns ();
1688 size += GET_MODE_SIZE (mode);
1691 /* Put the USE insns before the return. */
1692 emit_insn (call_fusage);
1694 /* Return whatever values was restored by jumping directly to the end
1696 expand_naked_return ();
1699 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1701 static enum type_class
1702 type_to_class (tree type)
1704 switch (TREE_CODE (type))
1706 case VOID_TYPE: return void_type_class;
1707 case INTEGER_TYPE: return integer_type_class;
1708 case ENUMERAL_TYPE: return enumeral_type_class;
1709 case BOOLEAN_TYPE: return boolean_type_class;
1710 case POINTER_TYPE: return pointer_type_class;
1711 case REFERENCE_TYPE: return reference_type_class;
1712 case OFFSET_TYPE: return offset_type_class;
1713 case REAL_TYPE: return real_type_class;
1714 case COMPLEX_TYPE: return complex_type_class;
1715 case FUNCTION_TYPE: return function_type_class;
1716 case METHOD_TYPE: return method_type_class;
1717 case RECORD_TYPE: return record_type_class;
1719 case QUAL_UNION_TYPE: return union_type_class;
1720 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1721 ? string_type_class : array_type_class);
1722 case LANG_TYPE: return lang_type_class;
1723 default: return no_type_class;
1727 /* Expand a call EXP to __builtin_classify_type. */
1730 expand_builtin_classify_type (tree exp)
1732 if (call_expr_nargs (exp))
1733 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1734 return GEN_INT (no_type_class);
1737 /* This helper macro, meant to be used in mathfn_built_in below,
1738 determines which among a set of three builtin math functions is
1739 appropriate for a given type mode. The `F' and `L' cases are
1740 automatically generated from the `double' case. */
1741 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1742 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1743 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1744 fcodel = BUILT_IN_MATHFN##L ; break;
1745 /* Similar to above, but appends _R after any F/L suffix. */
1746 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1747 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1748 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1749 fcodel = BUILT_IN_MATHFN##L_R ; break;
1751 /* Return mathematic function equivalent to FN but operating directly
1752 on TYPE, if available. If IMPLICIT is true find the function in
1753 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1754 can't do the conversion, return zero. */
1757 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1759 tree const *const fn_arr
1760 = implicit ? implicit_built_in_decls : built_in_decls;
1761 enum built_in_function fcode, fcodef, fcodel;
1765 CASE_MATHFN (BUILT_IN_ACOS)
1766 CASE_MATHFN (BUILT_IN_ACOSH)
1767 CASE_MATHFN (BUILT_IN_ASIN)
1768 CASE_MATHFN (BUILT_IN_ASINH)
1769 CASE_MATHFN (BUILT_IN_ATAN)
1770 CASE_MATHFN (BUILT_IN_ATAN2)
1771 CASE_MATHFN (BUILT_IN_ATANH)
1772 CASE_MATHFN (BUILT_IN_CBRT)
1773 CASE_MATHFN (BUILT_IN_CEIL)
1774 CASE_MATHFN (BUILT_IN_CEXPI)
1775 CASE_MATHFN (BUILT_IN_COPYSIGN)
1776 CASE_MATHFN (BUILT_IN_COS)
1777 CASE_MATHFN (BUILT_IN_COSH)
1778 CASE_MATHFN (BUILT_IN_DREM)
1779 CASE_MATHFN (BUILT_IN_ERF)
1780 CASE_MATHFN (BUILT_IN_ERFC)
1781 CASE_MATHFN (BUILT_IN_EXP)
1782 CASE_MATHFN (BUILT_IN_EXP10)
1783 CASE_MATHFN (BUILT_IN_EXP2)
1784 CASE_MATHFN (BUILT_IN_EXPM1)
1785 CASE_MATHFN (BUILT_IN_FABS)
1786 CASE_MATHFN (BUILT_IN_FDIM)
1787 CASE_MATHFN (BUILT_IN_FLOOR)
1788 CASE_MATHFN (BUILT_IN_FMA)
1789 CASE_MATHFN (BUILT_IN_FMAX)
1790 CASE_MATHFN (BUILT_IN_FMIN)
1791 CASE_MATHFN (BUILT_IN_FMOD)
1792 CASE_MATHFN (BUILT_IN_FREXP)
1793 CASE_MATHFN (BUILT_IN_GAMMA)
1794 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1795 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1796 CASE_MATHFN (BUILT_IN_HYPOT)
1797 CASE_MATHFN (BUILT_IN_ILOGB)
1798 CASE_MATHFN (BUILT_IN_INF)
1799 CASE_MATHFN (BUILT_IN_ISINF)
1800 CASE_MATHFN (BUILT_IN_J0)
1801 CASE_MATHFN (BUILT_IN_J1)
1802 CASE_MATHFN (BUILT_IN_JN)
1803 CASE_MATHFN (BUILT_IN_LCEIL)
1804 CASE_MATHFN (BUILT_IN_LDEXP)
1805 CASE_MATHFN (BUILT_IN_LFLOOR)
1806 CASE_MATHFN (BUILT_IN_LGAMMA)
1807 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1808 CASE_MATHFN (BUILT_IN_LLCEIL)
1809 CASE_MATHFN (BUILT_IN_LLFLOOR)
1810 CASE_MATHFN (BUILT_IN_LLRINT)
1811 CASE_MATHFN (BUILT_IN_LLROUND)
1812 CASE_MATHFN (BUILT_IN_LOG)
1813 CASE_MATHFN (BUILT_IN_LOG10)
1814 CASE_MATHFN (BUILT_IN_LOG1P)
1815 CASE_MATHFN (BUILT_IN_LOG2)
1816 CASE_MATHFN (BUILT_IN_LOGB)
1817 CASE_MATHFN (BUILT_IN_LRINT)
1818 CASE_MATHFN (BUILT_IN_LROUND)
1819 CASE_MATHFN (BUILT_IN_MODF)
1820 CASE_MATHFN (BUILT_IN_NAN)
1821 CASE_MATHFN (BUILT_IN_NANS)
1822 CASE_MATHFN (BUILT_IN_NEARBYINT)
1823 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1824 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1825 CASE_MATHFN (BUILT_IN_POW)
1826 CASE_MATHFN (BUILT_IN_POWI)
1827 CASE_MATHFN (BUILT_IN_POW10)
1828 CASE_MATHFN (BUILT_IN_REMAINDER)
1829 CASE_MATHFN (BUILT_IN_REMQUO)
1830 CASE_MATHFN (BUILT_IN_RINT)
1831 CASE_MATHFN (BUILT_IN_ROUND)
1832 CASE_MATHFN (BUILT_IN_SCALB)
1833 CASE_MATHFN (BUILT_IN_SCALBLN)
1834 CASE_MATHFN (BUILT_IN_SCALBN)
1835 CASE_MATHFN (BUILT_IN_SIGNBIT)
1836 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1837 CASE_MATHFN (BUILT_IN_SIN)
1838 CASE_MATHFN (BUILT_IN_SINCOS)
1839 CASE_MATHFN (BUILT_IN_SINH)
1840 CASE_MATHFN (BUILT_IN_SQRT)
1841 CASE_MATHFN (BUILT_IN_TAN)
1842 CASE_MATHFN (BUILT_IN_TANH)
1843 CASE_MATHFN (BUILT_IN_TGAMMA)
1844 CASE_MATHFN (BUILT_IN_TRUNC)
1845 CASE_MATHFN (BUILT_IN_Y0)
1846 CASE_MATHFN (BUILT_IN_Y1)
1847 CASE_MATHFN (BUILT_IN_YN)
1853 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1854 return fn_arr[fcode];
1855 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1856 return fn_arr[fcodef];
1857 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1858 return fn_arr[fcodel];
1863 /* Like mathfn_built_in_1(), but always use the implicit array. */
1866 mathfn_built_in (tree type, enum built_in_function fn)
1868 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1871 /* If errno must be maintained, expand the RTL to check if the result,
1872 TARGET, of a built-in function call, EXP, is NaN, and if so set
1876 expand_errno_check (tree exp, rtx target)
1878 rtx lab = gen_label_rtx ();
1880 /* Test the result; if it is NaN, set errno=EDOM because
1881 the argument was not in the domain. */
1882 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1883 NULL_RTX, NULL_RTX, lab,
1884 /* The jump is very likely. */
1885 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1888 /* If this built-in doesn't throw an exception, set errno directly. */
1889 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1891 #ifdef GEN_ERRNO_RTX
1892 rtx errno_rtx = GEN_ERRNO_RTX;
1895 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1897 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1903 /* Make sure the library call isn't expanded as a tail call. */
1904 CALL_EXPR_TAILCALL (exp) = 0;
1906 /* We can't set errno=EDOM directly; let the library call do it.
1907 Pop the arguments right away in case the call gets deleted. */
1909 expand_call (exp, target, 0);
1914 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1915 Return NULL_RTX if a normal call should be emitted rather than expanding
1916 the function in-line. EXP is the expression that is a call to the builtin
1917 function; if convenient, the result should be placed in TARGET.
1918 SUBTARGET may be used as the target for computing one of EXP's operands. */
1921 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1923 optab builtin_optab;
1925 tree fndecl = get_callee_fndecl (exp);
1926 enum machine_mode mode;
1927 bool errno_set = false;
1930 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1933 arg = CALL_EXPR_ARG (exp, 0);
1935 switch (DECL_FUNCTION_CODE (fndecl))
1937 CASE_FLT_FN (BUILT_IN_SQRT):
1938 errno_set = ! tree_expr_nonnegative_p (arg);
1939 builtin_optab = sqrt_optab;
1941 CASE_FLT_FN (BUILT_IN_EXP):
1942 errno_set = true; builtin_optab = exp_optab; break;
1943 CASE_FLT_FN (BUILT_IN_EXP10):
1944 CASE_FLT_FN (BUILT_IN_POW10):
1945 errno_set = true; builtin_optab = exp10_optab; break;
1946 CASE_FLT_FN (BUILT_IN_EXP2):
1947 errno_set = true; builtin_optab = exp2_optab; break;
1948 CASE_FLT_FN (BUILT_IN_EXPM1):
1949 errno_set = true; builtin_optab = expm1_optab; break;
1950 CASE_FLT_FN (BUILT_IN_LOGB):
1951 errno_set = true; builtin_optab = logb_optab; break;
1952 CASE_FLT_FN (BUILT_IN_LOG):
1953 errno_set = true; builtin_optab = log_optab; break;
1954 CASE_FLT_FN (BUILT_IN_LOG10):
1955 errno_set = true; builtin_optab = log10_optab; break;
1956 CASE_FLT_FN (BUILT_IN_LOG2):
1957 errno_set = true; builtin_optab = log2_optab; break;
1958 CASE_FLT_FN (BUILT_IN_LOG1P):
1959 errno_set = true; builtin_optab = log1p_optab; break;
1960 CASE_FLT_FN (BUILT_IN_ASIN):
1961 builtin_optab = asin_optab; break;
1962 CASE_FLT_FN (BUILT_IN_ACOS):
1963 builtin_optab = acos_optab; break;
1964 CASE_FLT_FN (BUILT_IN_TAN):
1965 builtin_optab = tan_optab; break;
1966 CASE_FLT_FN (BUILT_IN_ATAN):
1967 builtin_optab = atan_optab; break;
1968 CASE_FLT_FN (BUILT_IN_FLOOR):
1969 builtin_optab = floor_optab; break;
1970 CASE_FLT_FN (BUILT_IN_CEIL):
1971 builtin_optab = ceil_optab; break;
1972 CASE_FLT_FN (BUILT_IN_TRUNC):
1973 builtin_optab = btrunc_optab; break;
1974 CASE_FLT_FN (BUILT_IN_ROUND):
1975 builtin_optab = round_optab; break;
1976 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1977 builtin_optab = nearbyint_optab;
1978 if (flag_trapping_math)
1980 /* Else fallthrough and expand as rint. */
1981 CASE_FLT_FN (BUILT_IN_RINT):
1982 builtin_optab = rint_optab; break;
1983 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1984 builtin_optab = significand_optab; break;
1989 /* Make a suitable register to place result in. */
1990 mode = TYPE_MODE (TREE_TYPE (exp));
1992 if (! flag_errno_math || ! HONOR_NANS (mode))
1995 /* Before working hard, check whether the instruction is available. */
1996 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1998 target = gen_reg_rtx (mode);
2000 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2001 need to expand the argument again. This way, we will not perform
2002 side-effects more the once. */
2003 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2005 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2009 /* Compute into TARGET.
2010 Set TARGET to wherever the result comes back. */
2011 target = expand_unop (mode, builtin_optab, op0, target, 0);
2016 expand_errno_check (exp, target);
2018 /* Output the entire sequence. */
2019 insns = get_insns ();
2025 /* If we were unable to expand via the builtin, stop the sequence
2026 (without outputting the insns) and call to the library function
2027 with the stabilized argument list. */
2031 return expand_call (exp, target, target == const0_rtx);
2034 /* Expand a call to the builtin binary math functions (pow and atan2).
2035 Return NULL_RTX if a normal call should be emitted rather than expanding the
2036 function in-line. EXP is the expression that is a call to the builtin
2037 function; if convenient, the result should be placed in TARGET.
2038 SUBTARGET may be used as the target for computing one of EXP's
2042 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2044 optab builtin_optab;
2045 rtx op0, op1, insns;
2046 int op1_type = REAL_TYPE;
2047 tree fndecl = get_callee_fndecl (exp);
2049 enum machine_mode mode;
2050 bool errno_set = true;
2052 switch (DECL_FUNCTION_CODE (fndecl))
2054 CASE_FLT_FN (BUILT_IN_SCALBN):
2055 CASE_FLT_FN (BUILT_IN_SCALBLN):
2056 CASE_FLT_FN (BUILT_IN_LDEXP):
2057 op1_type = INTEGER_TYPE;
2062 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2065 arg0 = CALL_EXPR_ARG (exp, 0);
2066 arg1 = CALL_EXPR_ARG (exp, 1);
2068 switch (DECL_FUNCTION_CODE (fndecl))
2070 CASE_FLT_FN (BUILT_IN_POW):
2071 builtin_optab = pow_optab; break;
2072 CASE_FLT_FN (BUILT_IN_ATAN2):
2073 builtin_optab = atan2_optab; break;
2074 CASE_FLT_FN (BUILT_IN_SCALB):
2075 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2077 builtin_optab = scalb_optab; break;
2078 CASE_FLT_FN (BUILT_IN_SCALBN):
2079 CASE_FLT_FN (BUILT_IN_SCALBLN):
2080 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2082 /* Fall through... */
2083 CASE_FLT_FN (BUILT_IN_LDEXP):
2084 builtin_optab = ldexp_optab; break;
2085 CASE_FLT_FN (BUILT_IN_FMOD):
2086 builtin_optab = fmod_optab; break;
2087 CASE_FLT_FN (BUILT_IN_REMAINDER):
2088 CASE_FLT_FN (BUILT_IN_DREM):
2089 builtin_optab = remainder_optab; break;
2094 /* Make a suitable register to place result in. */
2095 mode = TYPE_MODE (TREE_TYPE (exp));
2097 /* Before working hard, check whether the instruction is available. */
2098 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2101 target = gen_reg_rtx (mode);
2103 if (! flag_errno_math || ! HONOR_NANS (mode))
2106 /* Always stabilize the argument list. */
2107 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2108 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2110 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2111 op1 = expand_normal (arg1);
2115 /* Compute into TARGET.
2116 Set TARGET to wherever the result comes back. */
2117 target = expand_binop (mode, builtin_optab, op0, op1,
2118 target, 0, OPTAB_DIRECT);
2120 /* If we were unable to expand via the builtin, stop the sequence
2121 (without outputting the insns) and call to the library function
2122 with the stabilized argument list. */
2126 return expand_call (exp, target, target == const0_rtx);
2130 expand_errno_check (exp, target);
2132 /* Output the entire sequence. */
2133 insns = get_insns ();
2140 /* Expand a call to the builtin sin and cos math functions.
2141 Return NULL_RTX if a normal call should be emitted rather than expanding the
2142 function in-line. EXP is the expression that is a call to the builtin
2143 function; if convenient, the result should be placed in TARGET.
2144 SUBTARGET may be used as the target for computing one of EXP's
2148 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2150 optab builtin_optab;
2152 tree fndecl = get_callee_fndecl (exp);
2153 enum machine_mode mode;
2156 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2159 arg = CALL_EXPR_ARG (exp, 0);
2161 switch (DECL_FUNCTION_CODE (fndecl))
2163 CASE_FLT_FN (BUILT_IN_SIN):
2164 CASE_FLT_FN (BUILT_IN_COS):
2165 builtin_optab = sincos_optab; break;
2170 /* Make a suitable register to place result in. */
2171 mode = TYPE_MODE (TREE_TYPE (exp));
2173 /* Check if sincos insn is available, otherwise fallback
2174 to sin or cos insn. */
2175 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2176 switch (DECL_FUNCTION_CODE (fndecl))
2178 CASE_FLT_FN (BUILT_IN_SIN):
2179 builtin_optab = sin_optab; break;
2180 CASE_FLT_FN (BUILT_IN_COS):
2181 builtin_optab = cos_optab; break;
2186 /* Before working hard, check whether the instruction is available. */
2187 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2189 target = gen_reg_rtx (mode);
2191 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2192 need to expand the argument again. This way, we will not perform
2193 side-effects more the once. */
2194 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2196 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2200 /* Compute into TARGET.
2201 Set TARGET to wherever the result comes back. */
2202 if (builtin_optab == sincos_optab)
2206 switch (DECL_FUNCTION_CODE (fndecl))
2208 CASE_FLT_FN (BUILT_IN_SIN):
2209 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2211 CASE_FLT_FN (BUILT_IN_COS):
2212 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2217 gcc_assert (result);
2221 target = expand_unop (mode, builtin_optab, op0, target, 0);
2226 /* Output the entire sequence. */
2227 insns = get_insns ();
2233 /* If we were unable to expand via the builtin, stop the sequence
2234 (without outputting the insns) and call to the library function
2235 with the stabilized argument list. */
2239 target = expand_call (exp, target, target == const0_rtx);
2244 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2245 return an RTL instruction code that implements the functionality.
2246 If that isn't possible or available return CODE_FOR_nothing. */
2248 static enum insn_code
2249 interclass_mathfn_icode (tree arg, tree fndecl)
2251 bool errno_set = false;
2252 optab builtin_optab = 0;
2253 enum machine_mode mode;
2255 switch (DECL_FUNCTION_CODE (fndecl))
2257 CASE_FLT_FN (BUILT_IN_ILOGB):
2258 errno_set = true; builtin_optab = ilogb_optab; break;
2259 CASE_FLT_FN (BUILT_IN_ISINF):
2260 builtin_optab = isinf_optab; break;
2261 case BUILT_IN_ISNORMAL:
2262 case BUILT_IN_ISFINITE:
2263 CASE_FLT_FN (BUILT_IN_FINITE):
2264 case BUILT_IN_FINITED32:
2265 case BUILT_IN_FINITED64:
2266 case BUILT_IN_FINITED128:
2267 case BUILT_IN_ISINFD32:
2268 case BUILT_IN_ISINFD64:
2269 case BUILT_IN_ISINFD128:
2270 /* These builtins have no optabs (yet). */
2276 /* There's no easy way to detect the case we need to set EDOM. */
2277 if (flag_errno_math && errno_set)
2278 return CODE_FOR_nothing;
2280 /* Optab mode depends on the mode of the input argument. */
2281 mode = TYPE_MODE (TREE_TYPE (arg));
2284 return optab_handler (builtin_optab, mode)->insn_code;
2285 return CODE_FOR_nothing;
2288 /* Expand a call to one of the builtin math functions that operate on
2289 floating point argument and output an integer result (ilogb, isinf,
2291 Return 0 if a normal call should be emitted rather than expanding the
2292 function in-line. EXP is the expression that is a call to the builtin
2293 function; if convenient, the result should be placed in TARGET.
2294 SUBTARGET may be used as the target for computing one of EXP's operands. */
2297 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2299 enum insn_code icode = CODE_FOR_nothing;
2301 tree fndecl = get_callee_fndecl (exp);
2302 enum machine_mode mode;
2305 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2308 arg = CALL_EXPR_ARG (exp, 0);
2309 icode = interclass_mathfn_icode (arg, fndecl);
2310 mode = TYPE_MODE (TREE_TYPE (arg));
2312 if (icode != CODE_FOR_nothing)
2314 rtx last = get_last_insn ();
2315 tree orig_arg = arg;
2316 /* Make a suitable register to place result in. */
2318 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
2319 || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
2320 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2322 gcc_assert (insn_data[icode].operand[0].predicate
2323 (target, GET_MODE (target)));
2325 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2326 need to expand the argument again. This way, we will not perform
2327 side-effects more the once. */
2328 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2330 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2332 if (mode != GET_MODE (op0))
2333 op0 = convert_to_mode (mode, op0, 0);
2335 /* Compute into TARGET.
2336 Set TARGET to wherever the result comes back. */
2337 if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
2339 delete_insns_since (last);
2340 CALL_EXPR_ARG (exp, 0) = orig_arg;
2346 /* Expand a call to the builtin sincos math function.
2347 Return NULL_RTX if a normal call should be emitted rather than expanding the
2348 function in-line. EXP is the expression that is a call to the builtin
2352 expand_builtin_sincos (tree exp)
2354 rtx op0, op1, op2, target1, target2;
2355 enum machine_mode mode;
2356 tree arg, sinp, cosp;
2358 location_t loc = EXPR_LOCATION (exp);
2360 if (!validate_arglist (exp, REAL_TYPE,
2361 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2364 arg = CALL_EXPR_ARG (exp, 0);
2365 sinp = CALL_EXPR_ARG (exp, 1);
2366 cosp = CALL_EXPR_ARG (exp, 2);
2368 /* Make a suitable register to place result in. */
2369 mode = TYPE_MODE (TREE_TYPE (arg));
2371 /* Check if sincos insn is available, otherwise emit the call. */
2372 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2375 target1 = gen_reg_rtx (mode);
2376 target2 = gen_reg_rtx (mode);
2378 op0 = expand_normal (arg);
2379 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2380 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2382 /* Compute into target1 and target2.
2383 Set TARGET to wherever the result comes back. */
2384 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2385 gcc_assert (result);
2387 /* Move target1 and target2 to the memory locations indicated
2389 emit_move_insn (op1, target1);
2390 emit_move_insn (op2, target2);
2395 /* Expand a call to the internal cexpi builtin to the sincos math function.
2396 EXP is the expression that is a call to the builtin function; if convenient,
2397 the result should be placed in TARGET. SUBTARGET may be used as the target
2398 for computing one of EXP's operands. */
2401 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2403 tree fndecl = get_callee_fndecl (exp);
2405 enum machine_mode mode;
2407 location_t loc = EXPR_LOCATION (exp);
2409 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2412 arg = CALL_EXPR_ARG (exp, 0);
2413 type = TREE_TYPE (arg);
2414 mode = TYPE_MODE (TREE_TYPE (arg));
2416 /* Try expanding via a sincos optab, fall back to emitting a libcall
2417 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2418 is only generated from sincos, cexp or if we have either of them. */
2419 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2421 op1 = gen_reg_rtx (mode);
2422 op2 = gen_reg_rtx (mode);
2424 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2426 /* Compute into op1 and op2. */
2427 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2429 else if (TARGET_HAS_SINCOS)
2431 tree call, fn = NULL_TREE;
2435 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2436 fn = built_in_decls[BUILT_IN_SINCOSF];
2437 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2438 fn = built_in_decls[BUILT_IN_SINCOS];
2439 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2440 fn = built_in_decls[BUILT_IN_SINCOSL];
2444 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2445 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2446 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2447 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2448 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2449 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2451 /* Make sure not to fold the sincos call again. */
2452 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2453 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2454 call, 3, arg, top1, top2));
2458 tree call, fn = NULL_TREE, narg;
2459 tree ctype = build_complex_type (type);
2461 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2462 fn = built_in_decls[BUILT_IN_CEXPF];
2463 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2464 fn = built_in_decls[BUILT_IN_CEXP];
2465 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2466 fn = built_in_decls[BUILT_IN_CEXPL];
2470 /* If we don't have a decl for cexp create one. This is the
2471 friendliest fallback if the user calls __builtin_cexpi
2472 without full target C99 function support. */
2473 if (fn == NULL_TREE)
2476 const char *name = NULL;
2478 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2480 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2482 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2485 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2486 fn = build_fn_decl (name, fntype);
2489 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2490 build_real (type, dconst0), arg);
2492 /* Make sure not to fold the cexp call again. */
2493 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2494 return expand_expr (build_call_nary (ctype, call, 1, narg),
2495 target, VOIDmode, EXPAND_NORMAL);
2498 /* Now build the proper return type. */
2499 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2500 make_tree (TREE_TYPE (arg), op2),
2501 make_tree (TREE_TYPE (arg), op1)),
2502 target, VOIDmode, EXPAND_NORMAL);
2505 /* Conveniently construct a function call expression. FNDECL names the
2506 function to be called, N is the number of arguments, and the "..."
2507 parameters are the argument expressions. Unlike build_call_exr
2508 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2511 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2514 tree fntype = TREE_TYPE (fndecl);
2515 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2518 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2520 SET_EXPR_LOCATION (fn, loc);
2523 #define build_call_nofold(...) \
2524 build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
2526 /* Expand a call to one of the builtin rounding functions gcc defines
2527 as an extension (lfloor and lceil). As these are gcc extensions we
2528 do not need to worry about setting errno to EDOM.
2529 If expanding via optab fails, lower expression to (int)(floor(x)).
2530 EXP is the expression that is a call to the builtin function;
2531 if convenient, the result should be placed in TARGET. */
2534 expand_builtin_int_roundingfn (tree exp, rtx target)
2536 convert_optab builtin_optab;
2537 rtx op0, insns, tmp;
2538 tree fndecl = get_callee_fndecl (exp);
2539 enum built_in_function fallback_fn;
2540 tree fallback_fndecl;
2541 enum machine_mode mode;
2544 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2547 arg = CALL_EXPR_ARG (exp, 0);
2549 switch (DECL_FUNCTION_CODE (fndecl))
2551 CASE_FLT_FN (BUILT_IN_LCEIL):
2552 CASE_FLT_FN (BUILT_IN_LLCEIL):
2553 builtin_optab = lceil_optab;
2554 fallback_fn = BUILT_IN_CEIL;
2557 CASE_FLT_FN (BUILT_IN_LFLOOR):
2558 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2559 builtin_optab = lfloor_optab;
2560 fallback_fn = BUILT_IN_FLOOR;
2567 /* Make a suitable register to place result in. */
2568 mode = TYPE_MODE (TREE_TYPE (exp));
2570 target = gen_reg_rtx (mode);
2572 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2573 need to expand the argument again. This way, we will not perform
2574 side-effects more the once. */
2575 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2577 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2581 /* Compute into TARGET. */
2582 if (expand_sfix_optab (target, op0, builtin_optab))
2584 /* Output the entire sequence. */
2585 insns = get_insns ();
2591 /* If we were unable to expand via the builtin, stop the sequence
2592 (without outputting the insns). */
2595 /* Fall back to floating point rounding optab. */
2596 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2598 /* For non-C99 targets we may end up without a fallback fndecl here
2599 if the user called __builtin_lfloor directly. In this case emit
2600 a call to the floor/ceil variants nevertheless. This should result
2601 in the best user experience for not full C99 targets. */
2602 if (fallback_fndecl == NULL_TREE)
2605 const char *name = NULL;
2607 switch (DECL_FUNCTION_CODE (fndecl))
2609 case BUILT_IN_LCEIL:
2610 case BUILT_IN_LLCEIL:
2613 case BUILT_IN_LCEILF:
2614 case BUILT_IN_LLCEILF:
2617 case BUILT_IN_LCEILL:
2618 case BUILT_IN_LLCEILL:
2621 case BUILT_IN_LFLOOR:
2622 case BUILT_IN_LLFLOOR:
2625 case BUILT_IN_LFLOORF:
2626 case BUILT_IN_LLFLOORF:
2629 case BUILT_IN_LFLOORL:
2630 case BUILT_IN_LLFLOORL:
2637 fntype = build_function_type_list (TREE_TYPE (arg),
2638 TREE_TYPE (arg), NULL_TREE);
2639 fallback_fndecl = build_fn_decl (name, fntype);
2642 exp = build_call_nofold (fallback_fndecl, 1, arg);
2644 tmp = expand_normal (exp);
2646 /* Truncate the result of floating point optab to integer
2647 via expand_fix (). */
2648 target = gen_reg_rtx (mode);
2649 expand_fix (target, tmp, 0);
2654 /* Expand a call to one of the builtin math functions doing integer
2656 Return 0 if a normal call should be emitted rather than expanding the
2657 function in-line. EXP is the expression that is a call to the builtin
2658 function; if convenient, the result should be placed in TARGET. */
2661 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2663 convert_optab builtin_optab;
2665 tree fndecl = get_callee_fndecl (exp);
2667 enum machine_mode mode;
2669 /* There's no easy way to detect the case we need to set EDOM. */
2670 if (flag_errno_math)
2673 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2676 arg = CALL_EXPR_ARG (exp, 0);
2678 switch (DECL_FUNCTION_CODE (fndecl))
2680 CASE_FLT_FN (BUILT_IN_LRINT):
2681 CASE_FLT_FN (BUILT_IN_LLRINT):
2682 builtin_optab = lrint_optab; break;
2683 CASE_FLT_FN (BUILT_IN_LROUND):
2684 CASE_FLT_FN (BUILT_IN_LLROUND):
2685 builtin_optab = lround_optab; break;
2690 /* Make a suitable register to place result in. */
2691 mode = TYPE_MODE (TREE_TYPE (exp));
2693 target = gen_reg_rtx (mode);
2695 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2696 need to expand the argument again. This way, we will not perform
2697 side-effects more the once. */
2698 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2700 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2704 if (expand_sfix_optab (target, op0, builtin_optab))
2706 /* Output the entire sequence. */
2707 insns = get_insns ();
2713 /* If we were unable to expand via the builtin, stop the sequence
2714 (without outputting the insns) and call to the library function
2715 with the stabilized argument list. */
2718 target = expand_call (exp, target, target == const0_rtx);
2723 /* To evaluate powi(x,n), the floating point value x raised to the
2724 constant integer exponent n, we use a hybrid algorithm that
2725 combines the "window method" with look-up tables. For an
2726 introduction to exponentiation algorithms and "addition chains",
2727 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2728 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2729 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2730 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2732 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2733 multiplications to inline before calling the system library's pow
2734 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2735 so this default never requires calling pow, powf or powl. */
2737 #ifndef POWI_MAX_MULTS
2738 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2741 /* The size of the "optimal power tree" lookup table. All
2742 exponents less than this value are simply looked up in the
2743 powi_table below. This threshold is also used to size the
2744 cache of pseudo registers that hold intermediate results. */
2745 #define POWI_TABLE_SIZE 256
2747 /* The size, in bits of the window, used in the "window method"
2748 exponentiation algorithm. This is equivalent to a radix of
2749 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2750 #define POWI_WINDOW_SIZE 3
2752 /* The following table is an efficient representation of an
2753 "optimal power tree". For each value, i, the corresponding
2754 value, j, in the table states than an optimal evaluation
2755 sequence for calculating pow(x,i) can be found by evaluating
2756 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2757 100 integers is given in Knuth's "Seminumerical algorithms". */
2759 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2761 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2762 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2763 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2764 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2765 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2766 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2767 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2768 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2769 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2770 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2771 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2772 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2773 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2774 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2775 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2776 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2777 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2778 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2779 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2780 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2781 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2782 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2783 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2784 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2785 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2786 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2787 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2788 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2789 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2790 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2791 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2792 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2796 /* Return the number of multiplications required to calculate
2797 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2798 subroutine of powi_cost. CACHE is an array indicating
2799 which exponents have already been calculated. */
2802 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2804 /* If we've already calculated this exponent, then this evaluation
2805 doesn't require any additional multiplications. */
2810 return powi_lookup_cost (n - powi_table[n], cache)
2811 + powi_lookup_cost (powi_table[n], cache) + 1;
2814 /* Return the number of multiplications required to calculate
2815 powi(x,n) for an arbitrary x, given the exponent N. This
2816 function needs to be kept in sync with expand_powi below. */
2819 powi_cost (HOST_WIDE_INT n)
2821 bool cache[POWI_TABLE_SIZE];
2822 unsigned HOST_WIDE_INT digit;
2823 unsigned HOST_WIDE_INT val;
2829 /* Ignore the reciprocal when calculating the cost. */
2830 val = (n < 0) ? -n : n;
2832 /* Initialize the exponent cache. */
2833 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2838 while (val >= POWI_TABLE_SIZE)
2842 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2843 result += powi_lookup_cost (digit, cache)
2844 + POWI_WINDOW_SIZE + 1;
2845 val >>= POWI_WINDOW_SIZE;
2854 return result + powi_lookup_cost (val, cache);
2857 /* Recursive subroutine of expand_powi. This function takes the array,
2858 CACHE, of already calculated exponents and an exponent N and returns
2859 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2862 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2864 unsigned HOST_WIDE_INT digit;
2868 if (n < POWI_TABLE_SIZE)
2873 target = gen_reg_rtx (mode);
2876 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2877 op1 = expand_powi_1 (mode, powi_table[n], cache);
2881 target = gen_reg_rtx (mode);
2882 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2883 op0 = expand_powi_1 (mode, n - digit, cache);
2884 op1 = expand_powi_1 (mode, digit, cache);
2888 target = gen_reg_rtx (mode);
2889 op0 = expand_powi_1 (mode, n >> 1, cache);
2893 result = expand_mult (mode, op0, op1, target, 0);
2894 if (result != target)
2895 emit_move_insn (target, result);
2899 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2900 floating point operand in mode MODE, and N is the exponent. This
2901 function needs to be kept in sync with powi_cost above. */
2904 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2906 rtx cache[POWI_TABLE_SIZE];
2910 return CONST1_RTX (mode);
2912 memset (cache, 0, sizeof (cache));
2915 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2917 /* If the original exponent was negative, reciprocate the result. */
2919 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2920 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2925 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
2926 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
2927 if we can simplify it. */
2929 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
2932 if (TREE_CODE (arg1) == REAL_CST
2933 && !TREE_OVERFLOW (arg1)
2934 && flag_unsafe_math_optimizations)
2936 enum machine_mode mode = TYPE_MODE (type);
2937 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
2938 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
2939 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
2940 tree op = NULL_TREE;
2944 /* Optimize pow (x, 0.5) into sqrt. */
2945 if (REAL_VALUES_EQUAL (c, dconsthalf))
2946 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
2950 REAL_VALUE_TYPE dconst1_4 = dconst1;
2951 REAL_VALUE_TYPE dconst3_4;
2952 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
2954 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
2955 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
2957 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
2958 machines that a builtin sqrt instruction is smaller than a
2959 call to pow with 0.25, so do this optimization even if
2961 if (REAL_VALUES_EQUAL (c, dconst1_4))
2963 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
2964 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
2967 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
2968 are optimizing for space. */
2969 else if (optimize_insn_for_speed_p ()
2970 && !TREE_SIDE_EFFECTS (arg0)
2971 && REAL_VALUES_EQUAL (c, dconst3_4))
2973 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
2974 tree sqrt2 = builtin_save_expr (sqrt1);
2975 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
2976 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
2981 /* Check whether we can do cbrt insstead of pow (x, 1./3.) and
2982 cbrt/sqrts instead of pow (x, 1./6.). */
2984 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
2986 /* First try 1/3. */
2987 REAL_VALUE_TYPE dconst1_3
2988 = real_value_truncate (mode, dconst_third ());
2990 if (REAL_VALUES_EQUAL (c, dconst1_3))
2991 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
2994 else if (optimize_insn_for_speed_p ())
2996 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
2997 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
2999 if (REAL_VALUES_EQUAL (c, dconst1_6))
3001 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3002 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3008 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
3014 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
3015 a normal call should be emitted rather than expanding the function
3016 in-line. EXP is the expression that is a call to the builtin
3017 function; if convenient, the result should be placed in TARGET. */
3020 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
3024 tree type = TREE_TYPE (exp);
3025 REAL_VALUE_TYPE cint, c, c2;
3028 enum machine_mode mode = TYPE_MODE (type);
3030 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
3033 arg0 = CALL_EXPR_ARG (exp, 0);
3034 arg1 = CALL_EXPR_ARG (exp, 1);
3036 if (TREE_CODE (arg1) != REAL_CST
3037 || TREE_OVERFLOW (arg1))
3038 return expand_builtin_mathfn_2 (exp, target, subtarget);
3040 /* Handle constant exponents. */
3042 /* For integer valued exponents we can expand to an optimal multiplication
3043 sequence using expand_powi. */
3044 c = TREE_REAL_CST (arg1);
3045 n = real_to_integer (&c);
3046 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3047 if (real_identical (&c, &cint)
3048 && ((n >= -1 && n <= 2)
3049 || (flag_unsafe_math_optimizations
3050 && optimize_insn_for_speed_p ()
3051 && powi_cost (n) <= POWI_MAX_MULTS)))
3053 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3056 op = force_reg (mode, op);
3057 op = expand_powi (op, mode, n);
3062 narg0 = builtin_save_expr (arg0);
3064 /* If the exponent is not integer valued, check if it is half of an integer.
3065 In this case we can expand to sqrt (x) * x**(n/2). */
3066 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3067 if (fn != NULL_TREE)
3069 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3070 n = real_to_integer (&c2);
3071 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3072 if (real_identical (&c2, &cint)
3073 && ((flag_unsafe_math_optimizations
3074 && optimize_insn_for_speed_p ()
3075 && powi_cost (n/2) <= POWI_MAX_MULTS)
3076 /* Even the c == 0.5 case cannot be done unconditionally
3077 when we need to preserve signed zeros, as
3078 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
3079 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
3080 /* For c == 1.5 we can assume that x * sqrt (x) is always
3081 smaller than pow (x, 1.5) if sqrt will not be expanded
3084 && (optab_handler (sqrt_optab, mode)->insn_code
3085 != CODE_FOR_nothing))))
3087 tree call_expr = build_call_nofold (fn, 1, narg0);
3088 /* Use expand_expr in case the newly built call expression
3089 was folded to a non-call. */
3090 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3093 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3094 op2 = force_reg (mode, op2);
3095 op2 = expand_powi (op2, mode, abs (n / 2));
3096 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3097 0, OPTAB_LIB_WIDEN);
3098 /* If the original exponent was negative, reciprocate the
3101 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3102 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3108 /* Check whether we can do a series of sqrt or cbrt's instead of the pow
3110 op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
3115 /* Try if the exponent is a third of an integer. In this case
3116 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3117 different from pow (x, 1./3.) due to rounding and behavior
3118 with negative x we need to constrain this transformation to
3119 unsafe math and positive x or finite math. */
3120 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3122 && flag_unsafe_math_optimizations
3123 && (tree_expr_nonnegative_p (arg0)
3124 || !HONOR_NANS (mode)))
3126 REAL_VALUE_TYPE dconst3;
3127 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3128 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3129 real_round (&c2, mode, &c2);
3130 n = real_to_integer (&c2);
3131 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3132 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3133 real_convert (&c2, mode, &c2);
3134 if (real_identical (&c2, &c)
3135 && ((optimize_insn_for_speed_p ()
3136 && powi_cost (n/3) <= POWI_MAX_MULTS)
3139 tree call_expr = build_call_nofold (fn, 1,narg0);
3140 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3141 if (abs (n) % 3 == 2)
3142 op = expand_simple_binop (mode, MULT, op, op, op,
3143 0, OPTAB_LIB_WIDEN);
3146 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3147 op2 = force_reg (mode, op2);
3148 op2 = expand_powi (op2, mode, abs (n / 3));
3149 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3150 0, OPTAB_LIB_WIDEN);
3151 /* If the original exponent was negative, reciprocate the
3154 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3155 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3161 /* Fall back to optab expansion. */
3162 return expand_builtin_mathfn_2 (exp, target, subtarget);
3165 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3166 a normal call should be emitted rather than expanding the function
3167 in-line. EXP is the expression that is a call to the builtin
3168 function; if convenient, the result should be placed in TARGET. */
3171 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3175 enum machine_mode mode;
3176 enum machine_mode mode2;
3178 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3181 arg0 = CALL_EXPR_ARG (exp, 0);
3182 arg1 = CALL_EXPR_ARG (exp, 1);
3183 mode = TYPE_MODE (TREE_TYPE (exp));
3185 /* Handle constant power. */
3187 if (TREE_CODE (arg1) == INTEGER_CST
3188 && !TREE_OVERFLOW (arg1))
3190 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3192 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3193 Otherwise, check the number of multiplications required. */
3194 if ((TREE_INT_CST_HIGH (arg1) == 0
3195 || TREE_INT_CST_HIGH (arg1) == -1)
3196 && ((n >= -1 && n <= 2)
3197 || (optimize_insn_for_speed_p ()
3198 && powi_cost (n) <= POWI_MAX_MULTS)))
3200 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3201 op0 = force_reg (mode, op0);
3202 return expand_powi (op0, mode, n);
3206 /* Emit a libcall to libgcc. */
3208 /* Mode of the 2nd argument must match that of an int. */
3209 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3211 if (target == NULL_RTX)
3212 target = gen_reg_rtx (mode);
3214 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3215 if (GET_MODE (op0) != mode)
3216 op0 = convert_to_mode (mode, op0, 0);
3217 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3218 if (GET_MODE (op1) != mode2)
3219 op1 = convert_to_mode (mode2, op1, 0);
3221 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3222 target, LCT_CONST, mode, 2,
3223 op0, mode, op1, mode2);
3228 /* Expand expression EXP which is a call to the strlen builtin. Return
3229 NULL_RTX if we failed the caller should emit a normal call, otherwise
3230 try to get the result in TARGET, if convenient. */
3233 expand_builtin_strlen (tree exp, rtx target,
3234 enum machine_mode target_mode)
3236 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3242 tree src = CALL_EXPR_ARG (exp, 0);
3243 rtx result, src_reg, char_rtx, before_strlen;
3244 enum machine_mode insn_mode = target_mode, char_mode;
3245 enum insn_code icode = CODE_FOR_nothing;
3248 /* If the length can be computed at compile-time, return it. */
3249 len = c_strlen (src, 0);
3251 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3253 /* If the length can be computed at compile-time and is constant
3254 integer, but there are side-effects in src, evaluate
3255 src for side-effects, then return len.
3256 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3257 can be optimized into: i++; x = 3; */
3258 len = c_strlen (src, 1);
3259 if (len && TREE_CODE (len) == INTEGER_CST)
3261 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3262 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3265 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3267 /* If SRC is not a pointer type, don't do this operation inline. */
3271 /* Bail out if we can't compute strlen in the right mode. */
3272 while (insn_mode != VOIDmode)
3274 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3275 if (icode != CODE_FOR_nothing)
3278 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3280 if (insn_mode == VOIDmode)
3283 /* Make a place to write the result of the instruction. */
3287 && GET_MODE (result) == insn_mode
3288 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3289 result = gen_reg_rtx (insn_mode);
3291 /* Make a place to hold the source address. We will not expand
3292 the actual source until we are sure that the expansion will
3293 not fail -- there are trees that cannot be expanded twice. */
3294 src_reg = gen_reg_rtx (Pmode);
3296 /* Mark the beginning of the strlen sequence so we can emit the
3297 source operand later. */
3298 before_strlen = get_last_insn ();
3300 char_rtx = const0_rtx;
3301 char_mode = insn_data[(int) icode].operand[2].mode;
3302 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3304 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3306 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3307 char_rtx, GEN_INT (align));
3312 /* Now that we are assured of success, expand the source. */
3314 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3316 emit_move_insn (src_reg, pat);
3321 emit_insn_after (pat, before_strlen);
3323 emit_insn_before (pat, get_insns ());
3325 /* Return the value in the proper mode for this function. */
3326 if (GET_MODE (result) == target_mode)
3328 else if (target != 0)
3329 convert_move (target, result, 0);
3331 target = convert_to_mode (target_mode, result, 0);
3337 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3338 bytes from constant string DATA + OFFSET and return it as target
3342 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3343 enum machine_mode mode)
3345 const char *str = (const char *) data;
3347 gcc_assert (offset >= 0
3348 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3349 <= strlen (str) + 1));
3351 return c_readstr (str + offset, mode);
3354 /* Expand a call EXP to the memcpy builtin.
3355 Return NULL_RTX if we failed, the caller should emit a normal call,
3356 otherwise try to get the result in TARGET, if convenient (and in
3357 mode MODE if that's convenient). */
3360 expand_builtin_memcpy (tree exp, rtx target)
3362 if (!validate_arglist (exp,
3363 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3367 tree dest = CALL_EXPR_ARG (exp, 0);
3368 tree src = CALL_EXPR_ARG (exp, 1);
3369 tree len = CALL_EXPR_ARG (exp, 2);
3370 const char *src_str;
3371 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3372 unsigned int dest_align
3373 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3374 rtx dest_mem, src_mem, dest_addr, len_rtx;
3375 HOST_WIDE_INT expected_size = -1;
3376 unsigned int expected_align = 0;
3378 /* If DEST is not a pointer type, call the normal function. */
3379 if (dest_align == 0)
3382 /* If either SRC is not a pointer type, don't do this
3383 operation in-line. */
3387 if (currently_expanding_gimple_stmt)
3388 stringop_block_profile (currently_expanding_gimple_stmt,
3389 &expected_align, &expected_size);
3391 if (expected_align < dest_align)
3392 expected_align = dest_align;
3393 dest_mem = get_memory_rtx (dest, len);
3394 set_mem_align (dest_mem, dest_align);
3395 len_rtx = expand_normal (len);
3396 src_str = c_getstr (src);
3398 /* If SRC is a string constant and block move would be done
3399 by pieces, we can avoid loading the string from memory
3400 and only stored the computed constants. */
3402 && CONST_INT_P (len_rtx)
3403 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3404 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3405 CONST_CAST (char *, src_str),
3408 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3409 builtin_memcpy_read_str,
3410 CONST_CAST (char *, src_str),
3411 dest_align, false, 0);
3412 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3413 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3417 src_mem = get_memory_rtx (src, len);
3418 set_mem_align (src_mem, src_align);
3420 /* Copy word part most expediently. */
3421 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3422 CALL_EXPR_TAILCALL (exp)
3423 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3424 expected_align, expected_size);
3428 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3429 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3435 /* Expand a call EXP to the mempcpy builtin.
3436 Return NULL_RTX if we failed; the caller should emit a normal call,
3437 otherwise try to get the result in TARGET, if convenient (and in
3438 mode MODE if that's convenient). If ENDP is 0 return the
3439 destination pointer, if ENDP is 1 return the end pointer ala
3440 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3444 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3446 if (!validate_arglist (exp,
3447 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3451 tree dest = CALL_EXPR_ARG (exp, 0);
3452 tree src = CALL_EXPR_ARG (exp, 1);
3453 tree len = CALL_EXPR_ARG (exp, 2);
3454 return expand_builtin_mempcpy_args (dest, src, len,
3455 target, mode, /*endp=*/ 1);
3459 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3460 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3461 so that this can also be called without constructing an actual CALL_EXPR.
3462 The other arguments and return value are the same as for
3463 expand_builtin_mempcpy. */
3466 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3467 rtx target, enum machine_mode mode, int endp)
3469 /* If return value is ignored, transform mempcpy into memcpy. */
3470 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3472 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3473 tree result = build_call_nofold (fn, 3, dest, src, len);
3474 return expand_expr (result, target, mode, EXPAND_NORMAL);
3478 const char *src_str;
3479 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3480 unsigned int dest_align
3481 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3482 rtx dest_mem, src_mem, len_rtx;
3484 /* If either SRC or DEST is not a pointer type, don't do this
3485 operation in-line. */
3486 if (dest_align == 0 || src_align == 0)
3489 /* If LEN is not constant, call the normal function. */
3490 if (! host_integerp (len, 1))
3493 len_rtx = expand_normal (len);
3494 src_str = c_getstr (src);
3496 /* If SRC is a string constant and block move would be done
3497 by pieces, we can avoid loading the string from memory
3498 and only stored the computed constants. */
3500 && CONST_INT_P (len_rtx)
3501 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3502 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3503 CONST_CAST (char *, src_str),
3506 dest_mem = get_memory_rtx (dest, len);
3507 set_mem_align (dest_mem, dest_align);
3508 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3509 builtin_memcpy_read_str,
3510 CONST_CAST (char *, src_str),
3511 dest_align, false, endp);
3512 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3513 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3517 if (CONST_INT_P (len_rtx)
3518 && can_move_by_pieces (INTVAL (len_rtx),
3519 MIN (dest_align, src_align)))
3521 dest_mem = get_memory_rtx (dest, len);
3522 set_mem_align (dest_mem, dest_align);
3523 src_mem = get_memory_rtx (src, len);
3524 set_mem_align (src_mem, src_align);
3525 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3526 MIN (dest_align, src_align), endp);
3527 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3528 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3537 # define HAVE_movstr 0
3538 # define CODE_FOR_movstr CODE_FOR_nothing
3541 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3542 we failed, the caller should emit a normal call, otherwise try to
3543 get the result in TARGET, if convenient. If ENDP is 0 return the
3544 destination pointer, if ENDP is 1 return the end pointer ala
3545 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3549 expand_movstr (tree dest, tree src, rtx target, int endp)
3555 const struct insn_data * data;
3560 dest_mem = get_memory_rtx (dest, NULL);
3561 src_mem = get_memory_rtx (src, NULL);
3564 target = force_reg (Pmode, XEXP (dest_mem, 0));
3565 dest_mem = replace_equiv_address (dest_mem, target);
3566 end = gen_reg_rtx (Pmode);
3570 if (target == 0 || target == const0_rtx)
3572 end = gen_reg_rtx (Pmode);
3580 data = insn_data + CODE_FOR_movstr;
3582 if (data->operand[0].mode != VOIDmode)
3583 end = gen_lowpart (data->operand[0].mode, end);
3585 insn = data->genfun (end, dest_mem, src_mem);
3591 /* movstr is supposed to set end to the address of the NUL
3592 terminator. If the caller requested a mempcpy-like return value,
3594 if (endp == 1 && target != const0_rtx)
3596 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3597 emit_move_insn (target, force_operand (tem, NULL_RTX));
3603 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3604 NULL_RTX if we failed the caller should emit a normal call, otherwise
3605 try to get the result in TARGET, if convenient (and in mode MODE if that's
3609 expand_builtin_strcpy (tree exp, rtx target)
3611 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3613 tree dest = CALL_EXPR_ARG (exp, 0);
3614 tree src = CALL_EXPR_ARG (exp, 1);
3615 return expand_builtin_strcpy_args (dest, src, target);
3620 /* Helper function to do the actual work for expand_builtin_strcpy. The
3621 arguments to the builtin_strcpy call DEST and SRC are broken out
3622 so that this can also be called without constructing an actual CALL_EXPR.
3623 The other arguments and return value are the same as for
3624 expand_builtin_strcpy. */
3627 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3629 return expand_movstr (dest, src, target, /*endp=*/0);
3632 /* Expand a call EXP to the stpcpy builtin.
3633 Return NULL_RTX if we failed the caller should emit a normal call,
3634 otherwise try to get the result in TARGET, if convenient (and in
3635 mode MODE if that's convenient). */
3638 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3641 location_t loc = EXPR_LOCATION (exp);
3643 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3646 dst = CALL_EXPR_ARG (exp, 0);
3647 src = CALL_EXPR_ARG (exp, 1);
3649 /* If return value is ignored, transform stpcpy into strcpy. */
3650 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3652 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3653 tree result = build_call_nofold (fn, 2, dst, src);
3654 return expand_expr (result, target, mode, EXPAND_NORMAL);
3661 /* Ensure we get an actual string whose length can be evaluated at
3662 compile-time, not an expression containing a string. This is
3663 because the latter will potentially produce pessimized code
3664 when used to produce the return value. */
3665 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3666 return expand_movstr (dst, src, target, /*endp=*/2);
3668 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3669 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3670 target, mode, /*endp=*/2);
3675 if (TREE_CODE (len) == INTEGER_CST)
3677 rtx len_rtx = expand_normal (len);
3679 if (CONST_INT_P (len_rtx))
3681 ret = expand_builtin_strcpy_args (dst, src, target);
3687 if (mode != VOIDmode)
3688 target = gen_reg_rtx (mode);
3690 target = gen_reg_rtx (GET_MODE (ret));
3692 if (GET_MODE (target) != GET_MODE (ret))
3693 ret = gen_lowpart (GET_MODE (target), ret);
3695 ret = plus_constant (ret, INTVAL (len_rtx));
3696 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3704 return expand_movstr (dst, src, target, /*endp=*/2);
3708 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3709 bytes from constant string DATA + OFFSET and return it as target
3713 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3714 enum machine_mode mode)
3716 const char *str = (const char *) data;
3718 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3721 return c_readstr (str + offset, mode);
3724 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3725 NULL_RTX if we failed the caller should emit a normal call. */
3728 expand_builtin_strncpy (tree exp, rtx target)
3730 location_t loc = EXPR_LOCATION (exp);
3732 if (validate_arglist (exp,
3733 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3735 tree dest = CALL_EXPR_ARG (exp, 0);
3736 tree src = CALL_EXPR_ARG (exp, 1);
3737 tree len = CALL_EXPR_ARG (exp, 2);
3738 tree slen = c_strlen (src, 1);
3740 /* We must be passed a constant len and src parameter. */
3741 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3744 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3746 /* We're required to pad with trailing zeros if the requested
3747 len is greater than strlen(s2)+1. In that case try to
3748 use store_by_pieces, if it fails, punt. */
3749 if (tree_int_cst_lt (slen, len))
3751 unsigned int dest_align
3752 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3753 const char *p = c_getstr (src);
3756 if (!p || dest_align == 0 || !host_integerp (len, 1)
3757 || !can_store_by_pieces (tree_low_cst (len, 1),
3758 builtin_strncpy_read_str,
3759 CONST_CAST (char *, p),
3763 dest_mem = get_memory_rtx (dest, len);
3764 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3765 builtin_strncpy_read_str,
3766 CONST_CAST (char *, p), dest_align, false, 0);
3767 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3768 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3775 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3776 bytes from constant string DATA + OFFSET and return it as target
3780 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3781 enum machine_mode mode)
3783 const char *c = (const char *) data;
3784 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3786 memset (p, *c, GET_MODE_SIZE (mode));
3788 return c_readstr (p, mode);
3791 /* Callback routine for store_by_pieces. Return the RTL of a register
3792 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3793 char value given in the RTL register data. For example, if mode is
3794 4 bytes wide, return the RTL for 0x01010101*data. */
3797 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3798 enum machine_mode mode)
3804 size = GET_MODE_SIZE (mode);
3808 p = XALLOCAVEC (char, size);
3809 memset (p, 1, size);
3810 coeff = c_readstr (p, mode);
3812 target = convert_to_mode (mode, (rtx) data, 1);
3813 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3814 return force_reg (mode, target);
3817 /* Expand expression EXP, which is a call to the memset builtin. Return
3818 NULL_RTX if we failed the caller should emit a normal call, otherwise
3819 try to get the result in TARGET, if convenient (and in mode MODE if that's
3823 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3825 if (!validate_arglist (exp,
3826 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3830 tree dest = CALL_EXPR_ARG (exp, 0);
3831 tree val = CALL_EXPR_ARG (exp, 1);
3832 tree len = CALL_EXPR_ARG (exp, 2);
3833 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3837 /* Helper function to do the actual work for expand_builtin_memset. The
3838 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3839 so that this can also be called without constructing an actual CALL_EXPR.
3840 The other arguments and return value are the same as for
3841 expand_builtin_memset. */
3844 expand_builtin_memset_args (tree dest, tree val, tree len,
3845 rtx target, enum machine_mode mode, tree orig_exp)
3848 enum built_in_function fcode;
3850 unsigned int dest_align;
3851 rtx dest_mem, dest_addr, len_rtx;
3852 HOST_WIDE_INT expected_size = -1;
3853 unsigned int expected_align = 0;
3855 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3857 /* If DEST is not a pointer type, don't do this operation in-line. */
3858 if (dest_align == 0)
3861 if (currently_expanding_gimple_stmt)
3862 stringop_block_profile (currently_expanding_gimple_stmt,
3863 &expected_align, &expected_size);
3865 if (expected_align < dest_align)
3866 expected_align = dest_align;
3868 /* If the LEN parameter is zero, return DEST. */
3869 if (integer_zerop (len))
3871 /* Evaluate and ignore VAL in case it has side-effects. */
3872 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3873 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3876 /* Stabilize the arguments in case we fail. */
3877 dest = builtin_save_expr (dest);
3878 val = builtin_save_expr (val);
3879 len = builtin_save_expr (len);
3881 len_rtx = expand_normal (len);
3882 dest_mem = get_memory_rtx (dest, len);
3884 if (TREE_CODE (val) != INTEGER_CST)
3888 val_rtx = expand_normal (val);
3889 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3892 /* Assume that we can memset by pieces if we can store
3893 * the coefficients by pieces (in the required modes).
3894 * We can't pass builtin_memset_gen_str as that emits RTL. */
3896 if (host_integerp (len, 1)
3897 && can_store_by_pieces (tree_low_cst (len, 1),
3898 builtin_memset_read_str, &c, dest_align,
3901 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3903 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3904 builtin_memset_gen_str, val_rtx, dest_align,
3907 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3908 dest_align, expected_align,
3912 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3913 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3917 if (target_char_cast (val, &c))
3922 if (host_integerp (len, 1)
3923 && can_store_by_pieces (tree_low_cst (len, 1),
3924 builtin_memset_read_str, &c, dest_align,
3926 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3927 builtin_memset_read_str, &c, dest_align, true, 0);
3928 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3929 dest_align, expected_align,
3933 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3934 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3938 set_mem_align (dest_mem, dest_align);
3939 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3940 CALL_EXPR_TAILCALL (orig_exp)
3941 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3942 expected_align, expected_size);
3946 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3947 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3953 fndecl = get_callee_fndecl (orig_exp);
3954 fcode = DECL_FUNCTION_CODE (fndecl);
3955 if (fcode == BUILT_IN_MEMSET)
3956 fn = build_call_nofold (fndecl, 3, dest, val, len);
3957 else if (fcode == BUILT_IN_BZERO)
3958 fn = build_call_nofold (fndecl, 2, dest, len);
3961 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3962 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3963 return expand_call (fn, target, target == const0_rtx);
3966 /* Expand expression EXP, which is a call to the bzero builtin. Return
3967 NULL_RTX if we failed the caller should emit a normal call. */
3970 expand_builtin_bzero (tree exp)
3973 location_t loc = EXPR_LOCATION (exp);
3975 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3978 dest = CALL_EXPR_ARG (exp, 0);
3979 size = CALL_EXPR_ARG (exp, 1);
3981 /* New argument list transforming bzero(ptr x, int y) to
3982 memset(ptr x, int 0, size_t y). This is done this way
3983 so that if it isn't expanded inline, we fallback to
3984 calling bzero instead of memset. */
3986 return expand_builtin_memset_args (dest, integer_zero_node,
3987 fold_convert_loc (loc, sizetype, size),
3988 const0_rtx, VOIDmode, exp);
3991 /* Expand expression EXP, which is a call to the memcmp built-in function.
3992 Return NULL_RTX if we failed and the
3993 caller should emit a normal call, otherwise try to get the result in
3994 TARGET, if convenient (and in mode MODE, if that's convenient). */
3997 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3998 ATTRIBUTE_UNUSED enum machine_mode mode)
4000 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4002 if (!validate_arglist (exp,
4003 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4006 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4008 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4011 tree arg1 = CALL_EXPR_ARG (exp, 0);
4012 tree arg2 = CALL_EXPR_ARG (exp, 1);
4013 tree len = CALL_EXPR_ARG (exp, 2);
4016 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4018 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4019 enum machine_mode insn_mode;
4021 #ifdef HAVE_cmpmemsi
4023 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4026 #ifdef HAVE_cmpstrnsi
4028 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4033 /* If we don't have POINTER_TYPE, call the function. */
4034 if (arg1_align == 0 || arg2_align == 0)
4037 /* Make a place to write the result of the instruction. */
4040 && REG_P (result) && GET_MODE (result) == insn_mode
4041 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4042 result = gen_reg_rtx (insn_mode);
4044 arg1_rtx = get_memory_rtx (arg1, len);
4045 arg2_rtx = get_memory_rtx (arg2, len);
4046 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4048 /* Set MEM_SIZE as appropriate. */
4049 if (CONST_INT_P (arg3_rtx))
4051 set_mem_size (arg1_rtx, arg3_rtx);
4052 set_mem_size (arg2_rtx, arg3_rtx);
4055 #ifdef HAVE_cmpmemsi
4057 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4058 GEN_INT (MIN (arg1_align, arg2_align)));
4061 #ifdef HAVE_cmpstrnsi
4063 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4064 GEN_INT (MIN (arg1_align, arg2_align)));
4072 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4073 TYPE_MODE (integer_type_node), 3,
4074 XEXP (arg1_rtx, 0), Pmode,
4075 XEXP (arg2_rtx, 0), Pmode,
4076 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4077 TYPE_UNSIGNED (sizetype)),
4078 TYPE_MODE (sizetype));
4080 /* Return the value in the proper mode for this function. */
4081 mode = TYPE_MODE (TREE_TYPE (exp));
4082 if (GET_MODE (result) == mode)
4084 else if (target != 0)
4086 convert_move (target, result, 0);
4090 return convert_to_mode (mode, result, 0);
4097 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4098 if we failed the caller should emit a normal call, otherwise try to get
4099 the result in TARGET, if convenient. */
4102 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4104 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4107 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4108 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4109 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4111 rtx arg1_rtx, arg2_rtx;
4112 rtx result, insn = NULL_RTX;
4114 tree arg1 = CALL_EXPR_ARG (exp, 0);
4115 tree arg2 = CALL_EXPR_ARG (exp, 1);
4118 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4120 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4122 /* If we don't have POINTER_TYPE, call the function. */
4123 if (arg1_align == 0 || arg2_align == 0)
4126 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4127 arg1 = builtin_save_expr (arg1);
4128 arg2 = builtin_save_expr (arg2);
4130 arg1_rtx = get_memory_rtx (arg1, NULL);
4131 arg2_rtx = get_memory_rtx (arg2, NULL);
4133 #ifdef HAVE_cmpstrsi
4134 /* Try to call cmpstrsi. */
4137 enum machine_mode insn_mode
4138 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4140 /* Make a place to write the result of the instruction. */
4143 && REG_P (result) && GET_MODE (result) == insn_mode
4144 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4145 result = gen_reg_rtx (insn_mode);
4147 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4148 GEN_INT (MIN (arg1_align, arg2_align)));
4151 #ifdef HAVE_cmpstrnsi
4152 /* Try to determine at least one length and call cmpstrnsi. */
4153 if (!insn && HAVE_cmpstrnsi)
4158 enum machine_mode insn_mode
4159 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4160 tree len1 = c_strlen (arg1, 1);
4161 tree len2 = c_strlen (arg2, 1);
4164 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4166 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4168 /* If we don't have a constant length for the first, use the length
4169 of the second, if we know it. We don't require a constant for
4170 this case; some cost analysis could be done if both are available
4171 but neither is constant. For now, assume they're equally cheap,
4172 unless one has side effects. If both strings have constant lengths,
4179 else if (TREE_SIDE_EFFECTS (len1))
4181 else if (TREE_SIDE_EFFECTS (len2))
4183 else if (TREE_CODE (len1) != INTEGER_CST)
4185 else if (TREE_CODE (len2) != INTEGER_CST)
4187 else if (tree_int_cst_lt (len1, len2))
4192 /* If both arguments have side effects, we cannot optimize. */
4193 if (!len || TREE_SIDE_EFFECTS (len))
4196 arg3_rtx = expand_normal (len);
4198 /* Make a place to write the result of the instruction. */
4201 && REG_P (result) && GET_MODE (result) == insn_mode
4202 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4203 result = gen_reg_rtx (insn_mode);
4205 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4206 GEN_INT (MIN (arg1_align, arg2_align)));
4212 enum machine_mode mode;
4215 /* Return the value in the proper mode for this function. */
4216 mode = TYPE_MODE (TREE_TYPE (exp));
4217 if (GET_MODE (result) == mode)
4220 return convert_to_mode (mode, result, 0);
4221 convert_move (target, result, 0);
4225 /* Expand the library call ourselves using a stabilized argument
4226 list to avoid re-evaluating the function's arguments twice. */
4227 #ifdef HAVE_cmpstrnsi
4230 fndecl = get_callee_fndecl (exp);
4231 fn = build_call_nofold (fndecl, 2, arg1, arg2);
4232 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4233 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4234 return expand_call (fn, target, target == const0_rtx);
4240 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4241 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4242 the result in TARGET, if convenient. */
4245 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4246 ATTRIBUTE_UNUSED enum machine_mode mode)
4248 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4250 if (!validate_arglist (exp,
4251 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4254 /* If c_strlen can determine an expression for one of the string
4255 lengths, and it doesn't have side effects, then emit cmpstrnsi
4256 using length MIN(strlen(string)+1, arg3). */
4257 #ifdef HAVE_cmpstrnsi
4260 tree len, len1, len2;
4261 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4264 tree arg1 = CALL_EXPR_ARG (exp, 0);
4265 tree arg2 = CALL_EXPR_ARG (exp, 1);
4266 tree arg3 = CALL_EXPR_ARG (exp, 2);
4269 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4271 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4272 enum machine_mode insn_mode
4273 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4275 len1 = c_strlen (arg1, 1);
4276 len2 = c_strlen (arg2, 1);
4279 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4281 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4283 /* If we don't have a constant length for the first, use the length
4284 of the second, if we know it. We don't require a constant for
4285 this case; some cost analysis could be done if both are available
4286 but neither is constant. For now, assume they're equally cheap,
4287 unless one has side effects. If both strings have constant lengths,
4294 else if (TREE_SIDE_EFFECTS (len1))
4296 else if (TREE_SIDE_EFFECTS (len2))
4298 else if (TREE_CODE (len1) != INTEGER_CST)
4300 else if (TREE_CODE (len2) != INTEGER_CST)
4302 else if (tree_int_cst_lt (len1, len2))
4307 /* If both arguments have side effects, we cannot optimize. */
4308 if (!len || TREE_SIDE_EFFECTS (len))
4311 /* The actual new length parameter is MIN(len,arg3). */
4312 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4313 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4315 /* If we don't have POINTER_TYPE, call the function. */
4316 if (arg1_align == 0 || arg2_align == 0)
4319 /* Make a place to write the result of the instruction. */
4322 && REG_P (result) && GET_MODE (result) == insn_mode
4323 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4324 result = gen_reg_rtx (insn_mode);
4326 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4327 arg1 = builtin_save_expr (arg1);
4328 arg2 = builtin_save_expr (arg2);
4329 len = builtin_save_expr (len);
4331 arg1_rtx = get_memory_rtx (arg1, len);
4332 arg2_rtx = get_memory_rtx (arg2, len);
4333 arg3_rtx = expand_normal (len);
4334 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4335 GEN_INT (MIN (arg1_align, arg2_align)));
4340 /* Return the value in the proper mode for this function. */
4341 mode = TYPE_MODE (TREE_TYPE (exp));
4342 if (GET_MODE (result) == mode)
4345 return convert_to_mode (mode, result, 0);
4346 convert_move (target, result, 0);
4350 /* Expand the library call ourselves using a stabilized argument
4351 list to avoid re-evaluating the function's arguments twice. */
4352 fndecl = get_callee_fndecl (exp);
4353 fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
4354 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4355 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4356 return expand_call (fn, target, target == const0_rtx);
4362 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4363 if that's convenient. */
4366 expand_builtin_saveregs (void)
4370 /* Don't do __builtin_saveregs more than once in a function.
4371 Save the result of the first call and reuse it. */
4372 if (saveregs_value != 0)
4373 return saveregs_value;
4375 /* When this function is called, it means that registers must be
4376 saved on entry to this function. So we migrate the call to the
4377 first insn of this function. */
4381 /* Do whatever the machine needs done in this case. */
4382 val = targetm.calls.expand_builtin_saveregs ();
4387 saveregs_value = val;
4389 /* Put the insns after the NOTE that starts the function. If this
4390 is inside a start_sequence, make the outer-level insn chain current, so
4391 the code is placed at the start of the function. */
4392 push_topmost_sequence ();
4393 emit_insn_after (seq, entry_of_function ());
4394 pop_topmost_sequence ();
4399 /* __builtin_args_info (N) returns word N of the arg space info
4400 for the current function. The number and meanings of words
4401 is controlled by the definition of CUMULATIVE_ARGS. */
4404 expand_builtin_args_info (tree exp)
4406 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4407 int *word_ptr = (int *) &crtl->args.info;
4409 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4411 if (call_expr_nargs (exp) != 0)
4413 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4414 error ("argument of %<__builtin_args_info%> must be constant");
4417 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4419 if (wordnum < 0 || wordnum >= nwords)
4420 error ("argument of %<__builtin_args_info%> out of range");
4422 return GEN_INT (word_ptr[wordnum]);
4426 error ("missing argument in %<__builtin_args_info%>");
4431 /* Expand a call to __builtin_next_arg. */
4434 expand_builtin_next_arg (void)
4436 /* Checking arguments is already done in fold_builtin_next_arg
4437 that must be called before this function. */
4438 return expand_binop (ptr_mode, add_optab,
4439 crtl->args.internal_arg_pointer,
4440 crtl->args.arg_offset_rtx,
4441 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4444 /* Make it easier for the backends by protecting the valist argument
4445 from multiple evaluations. */
4448 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4450 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4452 gcc_assert (vatype != NULL_TREE);
4454 if (TREE_CODE (vatype) == ARRAY_TYPE)
4456 if (TREE_SIDE_EFFECTS (valist))
4457 valist = save_expr (valist);
4459 /* For this case, the backends will be expecting a pointer to
4460 vatype, but it's possible we've actually been given an array
4461 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4463 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4465 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4466 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4475 if (! TREE_SIDE_EFFECTS (valist))
4478 pt = build_pointer_type (vatype);
4479 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4480 TREE_SIDE_EFFECTS (valist) = 1;
4483 if (TREE_SIDE_EFFECTS (valist))
4484 valist = save_expr (valist);
4485 valist = build_fold_indirect_ref_loc (loc, valist);
4491 /* The "standard" definition of va_list is void*. */
4494 std_build_builtin_va_list (void)
4496 return ptr_type_node;
4499 /* The "standard" abi va_list is va_list_type_node. */
4502 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4504 return va_list_type_node;
4507 /* The "standard" type of va_list is va_list_type_node. */
4510 std_canonical_va_list_type (tree type)
4514 if (INDIRECT_REF_P (type))
4515 type = TREE_TYPE (type);
4516 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4517 type = TREE_TYPE (type);
4518 wtype = va_list_type_node;
4520 /* Treat structure va_list types. */
4521 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4522 htype = TREE_TYPE (htype);
4523 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4525 /* If va_list is an array type, the argument may have decayed
4526 to a pointer type, e.g. by being passed to another function.
4527 In that case, unwrap both types so that we can compare the
4528 underlying records. */
4529 if (TREE_CODE (htype) == ARRAY_TYPE
4530 || POINTER_TYPE_P (htype))
4532 wtype = TREE_TYPE (wtype);
4533 htype = TREE_TYPE (htype);
4536 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4537 return va_list_type_node;
4542 /* The "standard" implementation of va_start: just assign `nextarg' to
4546 std_expand_builtin_va_start (tree valist, rtx nextarg)
4548 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4549 convert_move (va_r, nextarg, 0);
4552 /* Expand EXP, a call to __builtin_va_start. */
4555 expand_builtin_va_start (tree exp)
4559 location_t loc = EXPR_LOCATION (exp);
4561 if (call_expr_nargs (exp) < 2)
4563 error_at (loc, "too few arguments to function %<va_start%>");
4567 if (fold_builtin_next_arg (exp, true))
4570 nextarg = expand_builtin_next_arg ();
4571 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4573 if (targetm.expand_builtin_va_start)
4574 targetm.expand_builtin_va_start (valist, nextarg);
4576 std_expand_builtin_va_start (valist, nextarg);
4581 /* The "standard" implementation of va_arg: read the value from the
4582 current (padded) address and increment by the (padded) size. */
4585 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4588 tree addr, t, type_size, rounded_size, valist_tmp;
4589 unsigned HOST_WIDE_INT align, boundary;
4592 #ifdef ARGS_GROW_DOWNWARD
4593 /* All of the alignment and movement below is for args-grow-up machines.
4594 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4595 implement their own specialized gimplify_va_arg_expr routines. */
4599 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4601 type = build_pointer_type (type);
4603 align = PARM_BOUNDARY / BITS_PER_UNIT;
4604 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4606 /* When we align parameter on stack for caller, if the parameter
4607 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4608 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4609 here with caller. */
4610 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4611 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4613 boundary /= BITS_PER_UNIT;
4615 /* Hoist the valist value into a temporary for the moment. */
4616 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4618 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4619 requires greater alignment, we must perform dynamic alignment. */
4620 if (boundary > align
4621 && !integer_zerop (TYPE_SIZE (type)))
4623 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4624 fold_build2 (POINTER_PLUS_EXPR,
4626 valist_tmp, size_int (boundary - 1)));
4627 gimplify_and_add (t, pre_p);
4629 t = fold_convert (sizetype, valist_tmp);
4630 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4631 fold_convert (TREE_TYPE (valist),
4632 fold_build2 (BIT_AND_EXPR, sizetype, t,
4633 size_int (-boundary))));
4634 gimplify_and_add (t, pre_p);
4639 /* If the actual alignment is less than the alignment of the type,
4640 adjust the type accordingly so that we don't assume strict alignment
4641 when dereferencing the pointer. */
4642 boundary *= BITS_PER_UNIT;
4643 if (boundary < TYPE_ALIGN (type))
4645 type = build_variant_type_copy (type);
4646 TYPE_ALIGN (type) = boundary;
4649 /* Compute the rounded size of the type. */
4650 type_size = size_in_bytes (type);
4651 rounded_size = round_up (type_size, align);
4653 /* Reduce rounded_size so it's sharable with the postqueue. */
4654 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4658 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4660 /* Small args are padded downward. */
4661 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4662 rounded_size, size_int (align));
4663 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4664 size_binop (MINUS_EXPR, rounded_size, type_size));
4665 addr = fold_build2 (POINTER_PLUS_EXPR,
4666 TREE_TYPE (addr), addr, t);
4669 /* Compute new value for AP. */
4670 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4671 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4672 gimplify_and_add (t, pre_p);
4674 addr = fold_convert (build_pointer_type (type), addr);
4677 addr = build_va_arg_indirect_ref (addr);
4679 return build_va_arg_indirect_ref (addr);
4682 /* Build an indirect-ref expression over the given TREE, which represents a
4683 piece of a va_arg() expansion. */
4685 build_va_arg_indirect_ref (tree addr)
4687 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4689 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4695 /* Return a dummy expression of type TYPE in order to keep going after an
4699 dummy_object (tree type)
4701 tree t = build_int_cst (build_pointer_type (type), 0);
4702 return build1 (INDIRECT_REF, type, t);
4705 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4706 builtin function, but a very special sort of operator. */
4708 enum gimplify_status
4709 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4711 tree promoted_type, have_va_type;
4712 tree valist = TREE_OPERAND (*expr_p, 0);
4713 tree type = TREE_TYPE (*expr_p);
4715 location_t loc = EXPR_LOCATION (*expr_p);
4717 /* Verify that valist is of the proper type. */
4718 have_va_type = TREE_TYPE (valist);
4719 if (have_va_type == error_mark_node)
4721 have_va_type = targetm.canonical_va_list_type (have_va_type);
4723 if (have_va_type == NULL_TREE)
4725 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4729 /* Generate a diagnostic for requesting data of a type that cannot
4730 be passed through `...' due to type promotion at the call site. */
4731 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4734 static bool gave_help;
4737 /* Unfortunately, this is merely undefined, rather than a constraint
4738 violation, so we cannot make this an error. If this call is never
4739 executed, the program is still strictly conforming. */
4740 warned = warning_at (loc, 0,
4741 "%qT is promoted to %qT when passed through %<...%>",
4742 type, promoted_type);
4743 if (!gave_help && warned)
4746 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4747 promoted_type, type);
4750 /* We can, however, treat "undefined" any way we please.
4751 Call abort to encourage the user to fix the program. */
4753 inform (loc, "if this code is reached, the program will abort");
4754 /* Before the abort, allow the evaluation of the va_list
4755 expression to exit or longjmp. */
4756 gimplify_and_add (valist, pre_p);
4757 t = build_call_expr_loc (loc,
4758 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4759 gimplify_and_add (t, pre_p);
4761 /* This is dead code, but go ahead and finish so that the
4762 mode of the result comes out right. */
4763 *expr_p = dummy_object (type);
4768 /* Make it easier for the backends by protecting the valist argument
4769 from multiple evaluations. */
4770 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4772 /* For this case, the backends will be expecting a pointer to
4773 TREE_TYPE (abi), but it's possible we've
4774 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4776 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4778 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4779 valist = fold_convert_loc (loc, p1,
4780 build_fold_addr_expr_loc (loc, valist));
4783 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4786 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4788 if (!targetm.gimplify_va_arg_expr)
4789 /* FIXME: Once most targets are converted we should merely
4790 assert this is non-null. */
4793 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4798 /* Expand EXP, a call to __builtin_va_end. */
4801 expand_builtin_va_end (tree exp)
4803 tree valist = CALL_EXPR_ARG (exp, 0);
4805 /* Evaluate for side effects, if needed. I hate macros that don't
4807 if (TREE_SIDE_EFFECTS (valist))
4808 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4813 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4814 builtin rather than just as an assignment in stdarg.h because of the
4815 nastiness of array-type va_list types. */
4818 expand_builtin_va_copy (tree exp)
4821 location_t loc = EXPR_LOCATION (exp);
4823 dst = CALL_EXPR_ARG (exp, 0);
4824 src = CALL_EXPR_ARG (exp, 1);
4826 dst = stabilize_va_list_loc (loc, dst, 1);
4827 src = stabilize_va_list_loc (loc, src, 0);
4829 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4831 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4833 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4834 TREE_SIDE_EFFECTS (t) = 1;
4835 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4839 rtx dstb, srcb, size;
4841 /* Evaluate to pointers. */
4842 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4843 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4844 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4845 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4847 dstb = convert_memory_address (Pmode, dstb);
4848 srcb = convert_memory_address (Pmode, srcb);
4850 /* "Dereference" to BLKmode memories. */
4851 dstb = gen_rtx_MEM (BLKmode, dstb);
4852 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4853 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4854 srcb = gen_rtx_MEM (BLKmode, srcb);
4855 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4856 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4859 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4865 /* Expand a call to one of the builtin functions __builtin_frame_address or
4866 __builtin_return_address. */
4869 expand_builtin_frame_address (tree fndecl, tree exp)
4871 /* The argument must be a nonnegative integer constant.
4872 It counts the number of frames to scan up the stack.
4873 The value is the return address saved in that frame. */
4874 if (call_expr_nargs (exp) == 0)
4875 /* Warning about missing arg was already issued. */
4877 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4879 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4880 error ("invalid argument to %<__builtin_frame_address%>");
4882 error ("invalid argument to %<__builtin_return_address%>");
4888 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4889 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4891 /* Some ports cannot access arbitrary stack frames. */
4894 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4895 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4897 warning (0, "unsupported argument to %<__builtin_return_address%>");
4901 /* For __builtin_frame_address, return what we've got. */
4902 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4906 && ! CONSTANT_P (tem))
4907 tem = copy_to_mode_reg (Pmode, tem);
4912 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4913 we failed and the caller should emit a normal call, otherwise try to get
4914 the result in TARGET, if convenient. */
4917 expand_builtin_alloca (tree exp, rtx target)
4922 /* Emit normal call if marked not-inlineable. */
4923 if (CALL_CANNOT_INLINE_P (exp))
4926 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4929 /* Compute the argument. */
4930 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4932 /* Allocate the desired space. */
4933 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4934 result = convert_memory_address (ptr_mode, result);
4939 /* Expand a call to a bswap builtin with argument ARG0. MODE
4940 is the mode to expand with. */
4943 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4945 enum machine_mode mode;
4949 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4952 arg = CALL_EXPR_ARG (exp, 0);
4953 mode = TYPE_MODE (TREE_TYPE (arg));
4954 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4956 target = expand_unop (mode, bswap_optab, op0, target, 1);
4958 gcc_assert (target);
4960 return convert_to_mode (mode, target, 0);
4963 /* Expand a call to a unary builtin in EXP.
4964 Return NULL_RTX if a normal call should be emitted rather than expanding the
4965 function in-line. If convenient, the result should be placed in TARGET.
4966 SUBTARGET may be used as the target for computing one of EXP's operands. */
4969 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4970 rtx subtarget, optab op_optab)
4974 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4977 /* Compute the argument. */
4978 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4979 VOIDmode, EXPAND_NORMAL);
4980 /* Compute op, into TARGET if possible.
4981 Set TARGET to wherever the result comes back. */
4982 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4983 op_optab, op0, target, 1);
4984 gcc_assert (target);
4986 return convert_to_mode (target_mode, target, 0);
4989 /* Expand a call to __builtin_expect. We just return our argument
4990 as the builtin_expect semantic should've been already executed by
4991 tree branch prediction pass. */
4994 expand_builtin_expect (tree exp, rtx target)
4998 if (call_expr_nargs (exp) < 2)
5000 arg = CALL_EXPR_ARG (exp, 0);
5002 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5003 /* When guessing was done, the hints should be already stripped away. */
5004 gcc_assert (!flag_guess_branch_prob
5005 || optimize == 0 || errorcount || sorrycount);
5010 expand_builtin_trap (void)
5014 emit_insn (gen_trap ());
5017 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5021 /* Expand a call to __builtin_unreachable. We do nothing except emit
5022 a barrier saying that control flow will not pass here.
5024 It is the responsibility of the program being compiled to ensure
5025 that control flow does never reach __builtin_unreachable. */
5027 expand_builtin_unreachable (void)
5032 /* Expand EXP, a call to fabs, fabsf or fabsl.
5033 Return NULL_RTX if a normal call should be emitted rather than expanding
5034 the function inline. If convenient, the result should be placed
5035 in TARGET. SUBTARGET may be used as the target for computing
5039 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5041 enum machine_mode mode;
5045 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5048 arg = CALL_EXPR_ARG (exp, 0);
5049 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5050 mode = TYPE_MODE (TREE_TYPE (arg));
5051 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5052 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5055 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5056 Return NULL is a normal call should be emitted rather than expanding the
5057 function inline. If convenient, the result should be placed in TARGET.
5058 SUBTARGET may be used as the target for computing the operand. */
5061 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5066 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5069 arg = CALL_EXPR_ARG (exp, 0);
5070 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5072 arg = CALL_EXPR_ARG (exp, 1);
5073 op1 = expand_normal (arg);
5075 return expand_copysign (op0, op1, target);
5078 /* Create a new constant string literal and return a char* pointer to it.
5079 The STRING_CST value is the LEN characters at STR. */
5081 build_string_literal (int len, const char *str)
5083 tree t, elem, index, type;
5085 t = build_string (len, str);
5086 elem = build_type_variant (char_type_node, 1, 0);
5087 index = build_index_type (size_int (len - 1));
5088 type = build_array_type (elem, index);
5089 TREE_TYPE (t) = type;
5090 TREE_CONSTANT (t) = 1;
5091 TREE_READONLY (t) = 1;
5092 TREE_STATIC (t) = 1;
5094 type = build_pointer_type (elem);
5095 t = build1 (ADDR_EXPR, type,
5096 build4 (ARRAY_REF, elem,
5097 t, integer_zero_node, NULL_TREE, NULL_TREE));
5101 /* Expand a call to either the entry or exit function profiler. */
5104 expand_builtin_profile_func (bool exitp)
5106 rtx this_rtx, which;
5108 this_rtx = DECL_RTL (current_function_decl);
5109 gcc_assert (MEM_P (this_rtx));
5110 this_rtx = XEXP (this_rtx, 0);
5113 which = profile_function_exit_libfunc;
5115 which = profile_function_entry_libfunc;
5117 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5118 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5125 /* Expand a call to __builtin___clear_cache. */
5128 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5130 #ifndef HAVE_clear_cache
5131 #ifdef CLEAR_INSN_CACHE
5132 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5133 does something. Just do the default expansion to a call to
5137 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5138 does nothing. There is no need to call it. Do nothing. */
5140 #endif /* CLEAR_INSN_CACHE */
5142 /* We have a "clear_cache" insn, and it will handle everything. */
5144 rtx begin_rtx, end_rtx;
5145 enum insn_code icode;
5147 /* We must not expand to a library call. If we did, any
5148 fallback library function in libgcc that might contain a call to
5149 __builtin___clear_cache() would recurse infinitely. */
5150 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5152 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5156 if (HAVE_clear_cache)
5158 icode = CODE_FOR_clear_cache;
5160 begin = CALL_EXPR_ARG (exp, 0);
5161 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5162 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5163 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5164 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5166 end = CALL_EXPR_ARG (exp, 1);
5167 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5168 end_rtx = convert_memory_address (Pmode, end_rtx);
5169 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5170 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5172 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5175 #endif /* HAVE_clear_cache */
5178 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5181 round_trampoline_addr (rtx tramp)
5183 rtx temp, addend, mask;
5185 /* If we don't need too much alignment, we'll have been guaranteed
5186 proper alignment by get_trampoline_type. */
5187 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5190 /* Round address up to desired boundary. */
5191 temp = gen_reg_rtx (Pmode);
5192 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5193 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5195 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5196 temp, 0, OPTAB_LIB_WIDEN);
5197 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5198 temp, 0, OPTAB_LIB_WIDEN);
5204 expand_builtin_init_trampoline (tree exp)
5206 tree t_tramp, t_func, t_chain;
5207 rtx m_tramp, r_tramp, r_chain, tmp;
5209 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5210 POINTER_TYPE, VOID_TYPE))
5213 t_tramp = CALL_EXPR_ARG (exp, 0);
5214 t_func = CALL_EXPR_ARG (exp, 1);
5215 t_chain = CALL_EXPR_ARG (exp, 2);
5217 r_tramp = expand_normal (t_tramp);
5218 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5219 MEM_NOTRAP_P (m_tramp) = 1;
5221 /* The TRAMP argument should be the address of a field within the
5222 local function's FRAME decl. Let's see if we can fill in the
5223 to fill in the MEM_ATTRs for this memory. */
5224 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5225 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
5228 tmp = round_trampoline_addr (r_tramp);
5231 m_tramp = change_address (m_tramp, BLKmode, tmp);
5232 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5233 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5236 /* The FUNC argument should be the address of the nested function.
5237 Extract the actual function decl to pass to the hook. */
5238 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5239 t_func = TREE_OPERAND (t_func, 0);
5240 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5242 r_chain = expand_normal (t_chain);
5244 /* Generate insns to initialize the trampoline. */
5245 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5247 trampolines_created = 1;
5252 expand_builtin_adjust_trampoline (tree exp)
5256 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5259 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5260 tramp = round_trampoline_addr (tramp);
5261 if (targetm.calls.trampoline_adjust_address)
5262 tramp = targetm.calls.trampoline_adjust_address (tramp);
5267 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5268 function. The function first checks whether the back end provides
5269 an insn to implement signbit for the respective mode. If not, it
5270 checks whether the floating point format of the value is such that
5271 the sign bit can be extracted. If that is not the case, the
5272 function returns NULL_RTX to indicate that a normal call should be
5273 emitted rather than expanding the function in-line. EXP is the
5274 expression that is a call to the builtin function; if convenient,
5275 the result should be placed in TARGET. */
5277 expand_builtin_signbit (tree exp, rtx target)
5279 const struct real_format *fmt;
5280 enum machine_mode fmode, imode, rmode;
5283 enum insn_code icode;
5285 location_t loc = EXPR_LOCATION (exp);
5287 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5290 arg = CALL_EXPR_ARG (exp, 0);
5291 fmode = TYPE_MODE (TREE_TYPE (arg));
5292 rmode = TYPE_MODE (TREE_TYPE (exp));
5293 fmt = REAL_MODE_FORMAT (fmode);
5295 arg = builtin_save_expr (arg);
5297 /* Expand the argument yielding a RTX expression. */
5298 temp = expand_normal (arg);
5300 /* Check if the back end provides an insn that handles signbit for the
5302 icode = signbit_optab->handlers [(int) fmode].insn_code;
5303 if (icode != CODE_FOR_nothing)
5305 rtx last = get_last_insn ();
5306 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5307 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5309 delete_insns_since (last);
5312 /* For floating point formats without a sign bit, implement signbit
5314 bitpos = fmt->signbit_ro;
5317 /* But we can't do this if the format supports signed zero. */
5318 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5321 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5322 build_real (TREE_TYPE (arg), dconst0));
5323 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5326 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5328 imode = int_mode_for_mode (fmode);
5329 if (imode == BLKmode)
5331 temp = gen_lowpart (imode, temp);
5336 /* Handle targets with different FP word orders. */
5337 if (FLOAT_WORDS_BIG_ENDIAN)
5338 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5340 word = bitpos / BITS_PER_WORD;
5341 temp = operand_subword_force (temp, word, fmode);
5342 bitpos = bitpos % BITS_PER_WORD;
5345 /* Force the intermediate word_mode (or narrower) result into a
5346 register. This avoids attempting to create paradoxical SUBREGs
5347 of floating point modes below. */
5348 temp = force_reg (imode, temp);
5350 /* If the bitpos is within the "result mode" lowpart, the operation
5351 can be implement with a single bitwise AND. Otherwise, we need
5352 a right shift and an AND. */
5354 if (bitpos < GET_MODE_BITSIZE (rmode))
5356 double_int mask = double_int_setbit (double_int_zero, bitpos);
5358 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5359 temp = gen_lowpart (rmode, temp);
5360 temp = expand_binop (rmode, and_optab, temp,
5361 immed_double_int_const (mask, rmode),
5362 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5366 /* Perform a logical right shift to place the signbit in the least
5367 significant bit, then truncate the result to the desired mode
5368 and mask just this bit. */
5369 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5370 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5371 temp = gen_lowpart (rmode, temp);
5372 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5373 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5379 /* Expand fork or exec calls. TARGET is the desired target of the
5380 call. EXP is the call. FN is the
5381 identificator of the actual function. IGNORE is nonzero if the
5382 value is to be ignored. */
5385 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5390 /* If we are not profiling, just call the function. */
5391 if (!profile_arc_flag)
5394 /* Otherwise call the wrapper. This should be equivalent for the rest of
5395 compiler, so the code does not diverge, and the wrapper may run the
5396 code necessary for keeping the profiling sane. */
5398 switch (DECL_FUNCTION_CODE (fn))
5401 id = get_identifier ("__gcov_fork");
5404 case BUILT_IN_EXECL:
5405 id = get_identifier ("__gcov_execl");
5408 case BUILT_IN_EXECV:
5409 id = get_identifier ("__gcov_execv");
5412 case BUILT_IN_EXECLP:
5413 id = get_identifier ("__gcov_execlp");
5416 case BUILT_IN_EXECLE:
5417 id = get_identifier ("__gcov_execle");
5420 case BUILT_IN_EXECVP:
5421 id = get_identifier ("__gcov_execvp");
5424 case BUILT_IN_EXECVE:
5425 id = get_identifier ("__gcov_execve");
5432 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5433 FUNCTION_DECL, id, TREE_TYPE (fn));
5434 DECL_EXTERNAL (decl) = 1;
5435 TREE_PUBLIC (decl) = 1;
5436 DECL_ARTIFICIAL (decl) = 1;
5437 TREE_NOTHROW (decl) = 1;
5438 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5439 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5440 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5441 return expand_call (call, target, ignore);
5446 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5447 the pointer in these functions is void*, the tree optimizers may remove
5448 casts. The mode computed in expand_builtin isn't reliable either, due
5449 to __sync_bool_compare_and_swap.
5451 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5452 group of builtins. This gives us log2 of the mode size. */
5454 static inline enum machine_mode
5455 get_builtin_sync_mode (int fcode_diff)
5457 /* The size is not negotiable, so ask not to get BLKmode in return
5458 if the target indicates that a smaller size would be better. */
5459 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5462 /* Expand the memory expression LOC and return the appropriate memory operand
5463 for the builtin_sync operations. */
5466 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5470 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5471 addr = convert_memory_address (Pmode, addr);
5473 /* Note that we explicitly do not want any alias information for this
5474 memory, so that we kill all other live memories. Otherwise we don't
5475 satisfy the full barrier semantics of the intrinsic. */
5476 mem = validize_mem (gen_rtx_MEM (mode, addr));
5478 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5479 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5480 MEM_VOLATILE_P (mem) = 1;
5485 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5486 EXP is the CALL_EXPR. CODE is the rtx code
5487 that corresponds to the arithmetic or logical operation from the name;
5488 an exception here is that NOT actually means NAND. TARGET is an optional
5489 place for us to store the results; AFTER is true if this is the
5490 fetch_and_xxx form. IGNORE is true if we don't actually care about
5491 the result of the operation at all. */
5494 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5495 enum rtx_code code, bool after,
5496 rtx target, bool ignore)
5499 enum machine_mode old_mode;
5500 location_t loc = EXPR_LOCATION (exp);
5502 if (code == NOT && warn_sync_nand)
5504 tree fndecl = get_callee_fndecl (exp);
5505 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5507 static bool warned_f_a_n, warned_n_a_f;
5511 case BUILT_IN_FETCH_AND_NAND_1:
5512 case BUILT_IN_FETCH_AND_NAND_2:
5513 case BUILT_IN_FETCH_AND_NAND_4:
5514 case BUILT_IN_FETCH_AND_NAND_8:
5515 case BUILT_IN_FETCH_AND_NAND_16:
5520 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5521 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5522 warned_f_a_n = true;
5525 case BUILT_IN_NAND_AND_FETCH_1:
5526 case BUILT_IN_NAND_AND_FETCH_2:
5527 case BUILT_IN_NAND_AND_FETCH_4:
5528 case BUILT_IN_NAND_AND_FETCH_8:
5529 case BUILT_IN_NAND_AND_FETCH_16:
5534 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5535 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5536 warned_n_a_f = true;
5544 /* Expand the operands. */
5545 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5547 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5548 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5549 of CONST_INTs, where we know the old_mode only from the call argument. */
5550 old_mode = GET_MODE (val);
5551 if (old_mode == VOIDmode)
5552 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5553 val = convert_modes (mode, old_mode, val, 1);
5556 return expand_sync_operation (mem, val, code);
5558 return expand_sync_fetch_operation (mem, val, code, after, target);
5561 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5562 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5563 true if this is the boolean form. TARGET is a place for us to store the
5564 results; this is NOT optional if IS_BOOL is true. */
5567 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5568 bool is_bool, rtx target)
5570 rtx old_val, new_val, mem;
5571 enum machine_mode old_mode;
5573 /* Expand the operands. */
5574 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5577 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5578 mode, EXPAND_NORMAL);
5579 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5580 of CONST_INTs, where we know the old_mode only from the call argument. */
5581 old_mode = GET_MODE (old_val);
5582 if (old_mode == VOIDmode)
5583 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5584 old_val = convert_modes (mode, old_mode, old_val, 1);
5586 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5587 mode, EXPAND_NORMAL);
5588 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5589 of CONST_INTs, where we know the old_mode only from the call argument. */
5590 old_mode = GET_MODE (new_val);
5591 if (old_mode == VOIDmode)
5592 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5593 new_val = convert_modes (mode, old_mode, new_val, 1);
5596 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5598 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5601 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5602 general form is actually an atomic exchange, and some targets only
5603 support a reduced form with the second argument being a constant 1.
5604 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5608 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5612 enum machine_mode old_mode;
5614 /* Expand the operands. */
5615 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5616 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5617 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5618 of CONST_INTs, where we know the old_mode only from the call argument. */
5619 old_mode = GET_MODE (val);
5620 if (old_mode == VOIDmode)
5621 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5622 val = convert_modes (mode, old_mode, val, 1);
5624 return expand_sync_lock_test_and_set (mem, val, target);
5627 /* Expand the __sync_synchronize intrinsic. */
5630 expand_builtin_synchronize (void)
5633 VEC (tree, gc) *v_clobbers;
5635 #ifdef HAVE_memory_barrier
5636 if (HAVE_memory_barrier)
5638 emit_insn (gen_memory_barrier ());
5643 if (synchronize_libfunc != NULL_RTX)
5645 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5649 /* If no explicit memory barrier instruction is available, create an
5650 empty asm stmt with a memory clobber. */
5651 v_clobbers = VEC_alloc (tree, gc, 1);
5652 VEC_quick_push (tree, v_clobbers,
5653 tree_cons (NULL, build_string (6, "memory"), NULL));
5654 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5655 gimple_asm_set_volatile (x, true);
5656 expand_asm_stmt (x);
5659 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5662 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5664 enum insn_code icode;
5666 rtx val = const0_rtx;
5668 /* Expand the operands. */
5669 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5671 /* If there is an explicit operation in the md file, use it. */
5672 icode = sync_lock_release[mode];
5673 if (icode != CODE_FOR_nothing)
5675 if (!insn_data[icode].operand[1].predicate (val, mode))
5676 val = force_reg (mode, val);
5678 insn = GEN_FCN (icode) (mem, val);
5686 /* Otherwise we can implement this operation by emitting a barrier
5687 followed by a store of zero. */
5688 expand_builtin_synchronize ();
5689 emit_move_insn (mem, val);
5692 /* Expand an expression EXP that calls a built-in function,
5693 with result going to TARGET if that's convenient
5694 (and in mode MODE if that's convenient).
5695 SUBTARGET may be used as the target for computing one of EXP's operands.
5696 IGNORE is nonzero if the value is to be ignored. */
5699 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5702 tree fndecl = get_callee_fndecl (exp);
5703 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5704 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5706 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5707 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5709 /* When not optimizing, generate calls to library functions for a certain
5712 && !called_as_built_in (fndecl)
5713 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5714 && fcode != BUILT_IN_ALLOCA
5715 && fcode != BUILT_IN_FREE)
5716 return expand_call (exp, target, ignore);
5718 /* The built-in function expanders test for target == const0_rtx
5719 to determine whether the function's result will be ignored. */
5721 target = const0_rtx;
5723 /* If the result of a pure or const built-in function is ignored, and
5724 none of its arguments are volatile, we can avoid expanding the
5725 built-in call and just evaluate the arguments for side-effects. */
5726 if (target == const0_rtx
5727 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5729 bool volatilep = false;
5731 call_expr_arg_iterator iter;
5733 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5734 if (TREE_THIS_VOLATILE (arg))
5742 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5743 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5750 CASE_FLT_FN (BUILT_IN_FABS):
5751 target = expand_builtin_fabs (exp, target, subtarget);
5756 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5757 target = expand_builtin_copysign (exp, target, subtarget);
5762 /* Just do a normal library call if we were unable to fold
5764 CASE_FLT_FN (BUILT_IN_CABS):
5767 CASE_FLT_FN (BUILT_IN_EXP):
5768 CASE_FLT_FN (BUILT_IN_EXP10):
5769 CASE_FLT_FN (BUILT_IN_POW10):
5770 CASE_FLT_FN (BUILT_IN_EXP2):
5771 CASE_FLT_FN (BUILT_IN_EXPM1):
5772 CASE_FLT_FN (BUILT_IN_LOGB):
5773 CASE_FLT_FN (BUILT_IN_LOG):
5774 CASE_FLT_FN (BUILT_IN_LOG10):
5775 CASE_FLT_FN (BUILT_IN_LOG2):
5776 CASE_FLT_FN (BUILT_IN_LOG1P):
5777 CASE_FLT_FN (BUILT_IN_TAN):
5778 CASE_FLT_FN (BUILT_IN_ASIN):
5779 CASE_FLT_FN (BUILT_IN_ACOS):
5780 CASE_FLT_FN (BUILT_IN_ATAN):
5781 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5782 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5783 because of possible accuracy problems. */
5784 if (! flag_unsafe_math_optimizations)
5786 CASE_FLT_FN (BUILT_IN_SQRT):
5787 CASE_FLT_FN (BUILT_IN_FLOOR):
5788 CASE_FLT_FN (BUILT_IN_CEIL):
5789 CASE_FLT_FN (BUILT_IN_TRUNC):
5790 CASE_FLT_FN (BUILT_IN_ROUND):
5791 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5792 CASE_FLT_FN (BUILT_IN_RINT):
5793 target = expand_builtin_mathfn (exp, target, subtarget);
5798 CASE_FLT_FN (BUILT_IN_ILOGB):
5799 if (! flag_unsafe_math_optimizations)
5801 CASE_FLT_FN (BUILT_IN_ISINF):
5802 CASE_FLT_FN (BUILT_IN_FINITE):
5803 case BUILT_IN_ISFINITE:
5804 case BUILT_IN_ISNORMAL:
5805 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5810 CASE_FLT_FN (BUILT_IN_LCEIL):
5811 CASE_FLT_FN (BUILT_IN_LLCEIL):
5812 CASE_FLT_FN (BUILT_IN_LFLOOR):
5813 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5814 target = expand_builtin_int_roundingfn (exp, target);
5819 CASE_FLT_FN (BUILT_IN_LRINT):
5820 CASE_FLT_FN (BUILT_IN_LLRINT):
5821 CASE_FLT_FN (BUILT_IN_LROUND):
5822 CASE_FLT_FN (BUILT_IN_LLROUND):
5823 target = expand_builtin_int_roundingfn_2 (exp, target);
5828 CASE_FLT_FN (BUILT_IN_POW):
5829 target = expand_builtin_pow (exp, target, subtarget);
5834 CASE_FLT_FN (BUILT_IN_POWI):
5835 target = expand_builtin_powi (exp, target, subtarget);
5840 CASE_FLT_FN (BUILT_IN_ATAN2):
5841 CASE_FLT_FN (BUILT_IN_LDEXP):
5842 CASE_FLT_FN (BUILT_IN_SCALB):
5843 CASE_FLT_FN (BUILT_IN_SCALBN):
5844 CASE_FLT_FN (BUILT_IN_SCALBLN):
5845 if (! flag_unsafe_math_optimizations)
5848 CASE_FLT_FN (BUILT_IN_FMOD):
5849 CASE_FLT_FN (BUILT_IN_REMAINDER):
5850 CASE_FLT_FN (BUILT_IN_DREM):
5851 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5856 CASE_FLT_FN (BUILT_IN_CEXPI):
5857 target = expand_builtin_cexpi (exp, target, subtarget);
5858 gcc_assert (target);
5861 CASE_FLT_FN (BUILT_IN_SIN):
5862 CASE_FLT_FN (BUILT_IN_COS):
5863 if (! flag_unsafe_math_optimizations)
5865 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5870 CASE_FLT_FN (BUILT_IN_SINCOS):
5871 if (! flag_unsafe_math_optimizations)
5873 target = expand_builtin_sincos (exp);
5878 case BUILT_IN_APPLY_ARGS:
5879 return expand_builtin_apply_args ();
5881 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5882 FUNCTION with a copy of the parameters described by
5883 ARGUMENTS, and ARGSIZE. It returns a block of memory
5884 allocated on the stack into which is stored all the registers
5885 that might possibly be used for returning the result of a
5886 function. ARGUMENTS is the value returned by
5887 __builtin_apply_args. ARGSIZE is the number of bytes of
5888 arguments that must be copied. ??? How should this value be
5889 computed? We'll also need a safe worst case value for varargs
5891 case BUILT_IN_APPLY:
5892 if (!validate_arglist (exp, POINTER_TYPE,
5893 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5894 && !validate_arglist (exp, REFERENCE_TYPE,
5895 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5901 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5902 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5903 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5905 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5908 /* __builtin_return (RESULT) causes the function to return the
5909 value described by RESULT. RESULT is address of the block of
5910 memory returned by __builtin_apply. */
5911 case BUILT_IN_RETURN:
5912 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5913 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5916 case BUILT_IN_SAVEREGS:
5917 return expand_builtin_saveregs ();
5919 case BUILT_IN_ARGS_INFO:
5920 return expand_builtin_args_info (exp);
5922 case BUILT_IN_VA_ARG_PACK:
5923 /* All valid uses of __builtin_va_arg_pack () are removed during
5925 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5928 case BUILT_IN_VA_ARG_PACK_LEN:
5929 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5931 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5934 /* Return the address of the first anonymous stack arg. */
5935 case BUILT_IN_NEXT_ARG:
5936 if (fold_builtin_next_arg (exp, false))
5938 return expand_builtin_next_arg ();
5940 case BUILT_IN_CLEAR_CACHE:
5941 target = expand_builtin___clear_cache (exp);
5946 case BUILT_IN_CLASSIFY_TYPE:
5947 return expand_builtin_classify_type (exp);
5949 case BUILT_IN_CONSTANT_P:
5952 case BUILT_IN_FRAME_ADDRESS:
5953 case BUILT_IN_RETURN_ADDRESS:
5954 return expand_builtin_frame_address (fndecl, exp);
5956 /* Returns the address of the area where the structure is returned.
5958 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5959 if (call_expr_nargs (exp) != 0
5960 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5961 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5964 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5966 case BUILT_IN_ALLOCA:
5967 target = expand_builtin_alloca (exp, target);
5972 case BUILT_IN_STACK_SAVE:
5973 return expand_stack_save ();
5975 case BUILT_IN_STACK_RESTORE:
5976 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5979 case BUILT_IN_BSWAP32:
5980 case BUILT_IN_BSWAP64:
5981 target = expand_builtin_bswap (exp, target, subtarget);
5987 CASE_INT_FN (BUILT_IN_FFS):
5988 case BUILT_IN_FFSIMAX:
5989 target = expand_builtin_unop (target_mode, exp, target,
5990 subtarget, ffs_optab);
5995 CASE_INT_FN (BUILT_IN_CLZ):
5996 case BUILT_IN_CLZIMAX:
5997 target = expand_builtin_unop (target_mode, exp, target,
5998 subtarget, clz_optab);
6003 CASE_INT_FN (BUILT_IN_CTZ):
6004 case BUILT_IN_CTZIMAX:
6005 target = expand_builtin_unop (target_mode, exp, target,
6006 subtarget, ctz_optab);
6011 CASE_INT_FN (BUILT_IN_POPCOUNT):
6012 case BUILT_IN_POPCOUNTIMAX:
6013 target = expand_builtin_unop (target_mode, exp, target,
6014 subtarget, popcount_optab);
6019 CASE_INT_FN (BUILT_IN_PARITY):
6020 case BUILT_IN_PARITYIMAX:
6021 target = expand_builtin_unop (target_mode, exp, target,
6022 subtarget, parity_optab);
6027 case BUILT_IN_STRLEN:
6028 target = expand_builtin_strlen (exp, target, target_mode);
6033 case BUILT_IN_STRCPY:
6034 target = expand_builtin_strcpy (exp, target);
6039 case BUILT_IN_STRNCPY:
6040 target = expand_builtin_strncpy (exp, target);
6045 case BUILT_IN_STPCPY:
6046 target = expand_builtin_stpcpy (exp, target, mode);
6051 case BUILT_IN_MEMCPY:
6052 target = expand_builtin_memcpy (exp, target);
6057 case BUILT_IN_MEMPCPY:
6058 target = expand_builtin_mempcpy (exp, target, mode);
6063 case BUILT_IN_MEMSET:
6064 target = expand_builtin_memset (exp, target, mode);
6069 case BUILT_IN_BZERO:
6070 target = expand_builtin_bzero (exp);
6075 case BUILT_IN_STRCMP:
6076 target = expand_builtin_strcmp (exp, target);
6081 case BUILT_IN_STRNCMP:
6082 target = expand_builtin_strncmp (exp, target, mode);
6088 case BUILT_IN_MEMCMP:
6089 target = expand_builtin_memcmp (exp, target, mode);
6094 case BUILT_IN_SETJMP:
6095 /* This should have been lowered to the builtins below. */
6098 case BUILT_IN_SETJMP_SETUP:
6099 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6100 and the receiver label. */
6101 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6103 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6104 VOIDmode, EXPAND_NORMAL);
6105 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6106 rtx label_r = label_rtx (label);
6108 /* This is copied from the handling of non-local gotos. */
6109 expand_builtin_setjmp_setup (buf_addr, label_r);
6110 nonlocal_goto_handler_labels
6111 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6112 nonlocal_goto_handler_labels);
6113 /* ??? Do not let expand_label treat us as such since we would
6114 not want to be both on the list of non-local labels and on
6115 the list of forced labels. */
6116 FORCED_LABEL (label) = 0;
6121 case BUILT_IN_SETJMP_DISPATCHER:
6122 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6123 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6125 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6126 rtx label_r = label_rtx (label);
6128 /* Remove the dispatcher label from the list of non-local labels
6129 since the receiver labels have been added to it above. */
6130 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6135 case BUILT_IN_SETJMP_RECEIVER:
6136 /* __builtin_setjmp_receiver is passed the receiver label. */
6137 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6139 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6140 rtx label_r = label_rtx (label);
6142 expand_builtin_setjmp_receiver (label_r);
6147 /* __builtin_longjmp is passed a pointer to an array of five words.
6148 It's similar to the C library longjmp function but works with
6149 __builtin_setjmp above. */
6150 case BUILT_IN_LONGJMP:
6151 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6153 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6154 VOIDmode, EXPAND_NORMAL);
6155 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6157 if (value != const1_rtx)
6159 error ("%<__builtin_longjmp%> second argument must be 1");
6163 expand_builtin_longjmp (buf_addr, value);
6168 case BUILT_IN_NONLOCAL_GOTO:
6169 target = expand_builtin_nonlocal_goto (exp);
6174 /* This updates the setjmp buffer that is its argument with the value
6175 of the current stack pointer. */
6176 case BUILT_IN_UPDATE_SETJMP_BUF:
6177 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6180 = expand_normal (CALL_EXPR_ARG (exp, 0));
6182 expand_builtin_update_setjmp_buf (buf_addr);
6188 expand_builtin_trap ();
6191 case BUILT_IN_UNREACHABLE:
6192 expand_builtin_unreachable ();
6195 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6196 case BUILT_IN_SIGNBITD32:
6197 case BUILT_IN_SIGNBITD64:
6198 case BUILT_IN_SIGNBITD128:
6199 target = expand_builtin_signbit (exp, target);
6204 /* Various hooks for the DWARF 2 __throw routine. */
6205 case BUILT_IN_UNWIND_INIT:
6206 expand_builtin_unwind_init ();
6208 case BUILT_IN_DWARF_CFA:
6209 return virtual_cfa_rtx;
6210 #ifdef DWARF2_UNWIND_INFO
6211 case BUILT_IN_DWARF_SP_COLUMN:
6212 return expand_builtin_dwarf_sp_column ();
6213 case BUILT_IN_INIT_DWARF_REG_SIZES:
6214 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6217 case BUILT_IN_FROB_RETURN_ADDR:
6218 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6219 case BUILT_IN_EXTRACT_RETURN_ADDR:
6220 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6221 case BUILT_IN_EH_RETURN:
6222 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6223 CALL_EXPR_ARG (exp, 1));
6225 #ifdef EH_RETURN_DATA_REGNO
6226 case BUILT_IN_EH_RETURN_DATA_REGNO:
6227 return expand_builtin_eh_return_data_regno (exp);
6229 case BUILT_IN_EXTEND_POINTER:
6230 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6231 case BUILT_IN_EH_POINTER:
6232 return expand_builtin_eh_pointer (exp);
6233 case BUILT_IN_EH_FILTER:
6234 return expand_builtin_eh_filter (exp);
6235 case BUILT_IN_EH_COPY_VALUES:
6236 return expand_builtin_eh_copy_values (exp);
6238 case BUILT_IN_VA_START:
6239 return expand_builtin_va_start (exp);
6240 case BUILT_IN_VA_END:
6241 return expand_builtin_va_end (exp);
6242 case BUILT_IN_VA_COPY:
6243 return expand_builtin_va_copy (exp);
6244 case BUILT_IN_EXPECT:
6245 return expand_builtin_expect (exp, target);
6246 case BUILT_IN_PREFETCH:
6247 expand_builtin_prefetch (exp);
6250 case BUILT_IN_PROFILE_FUNC_ENTER:
6251 return expand_builtin_profile_func (false);
6252 case BUILT_IN_PROFILE_FUNC_EXIT:
6253 return expand_builtin_profile_func (true);
6255 case BUILT_IN_INIT_TRAMPOLINE:
6256 return expand_builtin_init_trampoline (exp);
6257 case BUILT_IN_ADJUST_TRAMPOLINE:
6258 return expand_builtin_adjust_trampoline (exp);
6261 case BUILT_IN_EXECL:
6262 case BUILT_IN_EXECV:
6263 case BUILT_IN_EXECLP:
6264 case BUILT_IN_EXECLE:
6265 case BUILT_IN_EXECVP:
6266 case BUILT_IN_EXECVE:
6267 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6272 case BUILT_IN_FETCH_AND_ADD_1:
6273 case BUILT_IN_FETCH_AND_ADD_2:
6274 case BUILT_IN_FETCH_AND_ADD_4:
6275 case BUILT_IN_FETCH_AND_ADD_8:
6276 case BUILT_IN_FETCH_AND_ADD_16:
6277 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6278 target = expand_builtin_sync_operation (mode, exp, PLUS,
6279 false, target, ignore);
6284 case BUILT_IN_FETCH_AND_SUB_1:
6285 case BUILT_IN_FETCH_AND_SUB_2:
6286 case BUILT_IN_FETCH_AND_SUB_4:
6287 case BUILT_IN_FETCH_AND_SUB_8:
6288 case BUILT_IN_FETCH_AND_SUB_16:
6289 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6290 target = expand_builtin_sync_operation (mode, exp, MINUS,
6291 false, target, ignore);
6296 case BUILT_IN_FETCH_AND_OR_1:
6297 case BUILT_IN_FETCH_AND_OR_2:
6298 case BUILT_IN_FETCH_AND_OR_4:
6299 case BUILT_IN_FETCH_AND_OR_8:
6300 case BUILT_IN_FETCH_AND_OR_16:
6301 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6302 target = expand_builtin_sync_operation (mode, exp, IOR,
6303 false, target, ignore);
6308 case BUILT_IN_FETCH_AND_AND_1:
6309 case BUILT_IN_FETCH_AND_AND_2:
6310 case BUILT_IN_FETCH_AND_AND_4:
6311 case BUILT_IN_FETCH_AND_AND_8:
6312 case BUILT_IN_FETCH_AND_AND_16:
6313 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6314 target = expand_builtin_sync_operation (mode, exp, AND,
6315 false, target, ignore);
6320 case BUILT_IN_FETCH_AND_XOR_1:
6321 case BUILT_IN_FETCH_AND_XOR_2:
6322 case BUILT_IN_FETCH_AND_XOR_4:
6323 case BUILT_IN_FETCH_AND_XOR_8:
6324 case BUILT_IN_FETCH_AND_XOR_16:
6325 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6326 target = expand_builtin_sync_operation (mode, exp, XOR,
6327 false, target, ignore);
6332 case BUILT_IN_FETCH_AND_NAND_1:
6333 case BUILT_IN_FETCH_AND_NAND_2:
6334 case BUILT_IN_FETCH_AND_NAND_4:
6335 case BUILT_IN_FETCH_AND_NAND_8:
6336 case BUILT_IN_FETCH_AND_NAND_16:
6337 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6338 target = expand_builtin_sync_operation (mode, exp, NOT,
6339 false, target, ignore);
6344 case BUILT_IN_ADD_AND_FETCH_1:
6345 case BUILT_IN_ADD_AND_FETCH_2:
6346 case BUILT_IN_ADD_AND_FETCH_4:
6347 case BUILT_IN_ADD_AND_FETCH_8:
6348 case BUILT_IN_ADD_AND_FETCH_16:
6349 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6350 target = expand_builtin_sync_operation (mode, exp, PLUS,
6351 true, target, ignore);
6356 case BUILT_IN_SUB_AND_FETCH_1:
6357 case BUILT_IN_SUB_AND_FETCH_2:
6358 case BUILT_IN_SUB_AND_FETCH_4:
6359 case BUILT_IN_SUB_AND_FETCH_8:
6360 case BUILT_IN_SUB_AND_FETCH_16:
6361 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6362 target = expand_builtin_sync_operation (mode, exp, MINUS,
6363 true, target, ignore);
6368 case BUILT_IN_OR_AND_FETCH_1:
6369 case BUILT_IN_OR_AND_FETCH_2:
6370 case BUILT_IN_OR_AND_FETCH_4:
6371 case BUILT_IN_OR_AND_FETCH_8:
6372 case BUILT_IN_OR_AND_FETCH_16:
6373 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6374 target = expand_builtin_sync_operation (mode, exp, IOR,
6375 true, target, ignore);
6380 case BUILT_IN_AND_AND_FETCH_1:
6381 case BUILT_IN_AND_AND_FETCH_2:
6382 case BUILT_IN_AND_AND_FETCH_4:
6383 case BUILT_IN_AND_AND_FETCH_8:
6384 case BUILT_IN_AND_AND_FETCH_16:
6385 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6386 target = expand_builtin_sync_operation (mode, exp, AND,
6387 true, target, ignore);
6392 case BUILT_IN_XOR_AND_FETCH_1:
6393 case BUILT_IN_XOR_AND_FETCH_2:
6394 case BUILT_IN_XOR_AND_FETCH_4:
6395 case BUILT_IN_XOR_AND_FETCH_8:
6396 case BUILT_IN_XOR_AND_FETCH_16:
6397 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6398 target = expand_builtin_sync_operation (mode, exp, XOR,
6399 true, target, ignore);
6404 case BUILT_IN_NAND_AND_FETCH_1:
6405 case BUILT_IN_NAND_AND_FETCH_2:
6406 case BUILT_IN_NAND_AND_FETCH_4:
6407 case BUILT_IN_NAND_AND_FETCH_8:
6408 case BUILT_IN_NAND_AND_FETCH_16:
6409 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6410 target = expand_builtin_sync_operation (mode, exp, NOT,
6411 true, target, ignore);
6416 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6417 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6418 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6419 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6420 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6421 if (mode == VOIDmode)
6422 mode = TYPE_MODE (boolean_type_node);
6423 if (!target || !register_operand (target, mode))
6424 target = gen_reg_rtx (mode);
6426 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6427 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6432 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6433 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6434 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6435 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6436 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6437 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6438 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6443 case BUILT_IN_LOCK_TEST_AND_SET_1:
6444 case BUILT_IN_LOCK_TEST_AND_SET_2:
6445 case BUILT_IN_LOCK_TEST_AND_SET_4:
6446 case BUILT_IN_LOCK_TEST_AND_SET_8:
6447 case BUILT_IN_LOCK_TEST_AND_SET_16:
6448 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6449 target = expand_builtin_lock_test_and_set (mode, exp, target);
6454 case BUILT_IN_LOCK_RELEASE_1:
6455 case BUILT_IN_LOCK_RELEASE_2:
6456 case BUILT_IN_LOCK_RELEASE_4:
6457 case BUILT_IN_LOCK_RELEASE_8:
6458 case BUILT_IN_LOCK_RELEASE_16:
6459 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6460 expand_builtin_lock_release (mode, exp);
6463 case BUILT_IN_SYNCHRONIZE:
6464 expand_builtin_synchronize ();
6467 case BUILT_IN_OBJECT_SIZE:
6468 return expand_builtin_object_size (exp);
6470 case BUILT_IN_MEMCPY_CHK:
6471 case BUILT_IN_MEMPCPY_CHK:
6472 case BUILT_IN_MEMMOVE_CHK:
6473 case BUILT_IN_MEMSET_CHK:
6474 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6479 case BUILT_IN_STRCPY_CHK:
6480 case BUILT_IN_STPCPY_CHK:
6481 case BUILT_IN_STRNCPY_CHK:
6482 case BUILT_IN_STRCAT_CHK:
6483 case BUILT_IN_STRNCAT_CHK:
6484 case BUILT_IN_SNPRINTF_CHK:
6485 case BUILT_IN_VSNPRINTF_CHK:
6486 maybe_emit_chk_warning (exp, fcode);
6489 case BUILT_IN_SPRINTF_CHK:
6490 case BUILT_IN_VSPRINTF_CHK:
6491 maybe_emit_sprintf_chk_warning (exp, fcode);
6495 maybe_emit_free_warning (exp);
6498 default: /* just do library call, if unknown builtin */
6502 /* The switch statement above can drop through to cause the function
6503 to be called normally. */
6504 return expand_call (exp, target, ignore);
6507 /* Determine whether a tree node represents a call to a built-in
6508 function. If the tree T is a call to a built-in function with
6509 the right number of arguments of the appropriate types, return
6510 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6511 Otherwise the return value is END_BUILTINS. */
6513 enum built_in_function
6514 builtin_mathfn_code (const_tree t)
6516 const_tree fndecl, arg, parmlist;
6517 const_tree argtype, parmtype;
6518 const_call_expr_arg_iterator iter;
6520 if (TREE_CODE (t) != CALL_EXPR
6521 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6522 return END_BUILTINS;
6524 fndecl = get_callee_fndecl (t);
6525 if (fndecl == NULL_TREE
6526 || TREE_CODE (fndecl) != FUNCTION_DECL
6527 || ! DECL_BUILT_IN (fndecl)
6528 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6529 return END_BUILTINS;
6531 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6532 init_const_call_expr_arg_iterator (t, &iter);
6533 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6535 /* If a function doesn't take a variable number of arguments,
6536 the last element in the list will have type `void'. */
6537 parmtype = TREE_VALUE (parmlist);
6538 if (VOID_TYPE_P (parmtype))
6540 if (more_const_call_expr_args_p (&iter))
6541 return END_BUILTINS;
6542 return DECL_FUNCTION_CODE (fndecl);
6545 if (! more_const_call_expr_args_p (&iter))
6546 return END_BUILTINS;
6548 arg = next_const_call_expr_arg (&iter);
6549 argtype = TREE_TYPE (arg);
6551 if (SCALAR_FLOAT_TYPE_P (parmtype))
6553 if (! SCALAR_FLOAT_TYPE_P (argtype))
6554 return END_BUILTINS;
6556 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6558 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6559 return END_BUILTINS;
6561 else if (POINTER_TYPE_P (parmtype))
6563 if (! POINTER_TYPE_P (argtype))
6564 return END_BUILTINS;
6566 else if (INTEGRAL_TYPE_P (parmtype))
6568 if (! INTEGRAL_TYPE_P (argtype))
6569 return END_BUILTINS;
6572 return END_BUILTINS;
6575 /* Variable-length argument list. */
6576 return DECL_FUNCTION_CODE (fndecl);
6579 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6580 evaluate to a constant. */
6583 fold_builtin_constant_p (tree arg)
6585 /* We return 1 for a numeric type that's known to be a constant
6586 value at compile-time or for an aggregate type that's a
6587 literal constant. */
6590 /* If we know this is a constant, emit the constant of one. */
6591 if (CONSTANT_CLASS_P (arg)
6592 || (TREE_CODE (arg) == CONSTRUCTOR
6593 && TREE_CONSTANT (arg)))
6594 return integer_one_node;
6595 if (TREE_CODE (arg) == ADDR_EXPR)
6597 tree op = TREE_OPERAND (arg, 0);
6598 if (TREE_CODE (op) == STRING_CST
6599 || (TREE_CODE (op) == ARRAY_REF
6600 && integer_zerop (TREE_OPERAND (op, 1))
6601 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6602 return integer_one_node;
6605 /* If this expression has side effects, show we don't know it to be a
6606 constant. Likewise if it's a pointer or aggregate type since in
6607 those case we only want literals, since those are only optimized
6608 when generating RTL, not later.
6609 And finally, if we are compiling an initializer, not code, we
6610 need to return a definite result now; there's not going to be any
6611 more optimization done. */
6612 if (TREE_SIDE_EFFECTS (arg)
6613 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6614 || POINTER_TYPE_P (TREE_TYPE (arg))
6616 || folding_initializer)
6617 return integer_zero_node;
6622 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6623 return it as a truthvalue. */
6626 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6628 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6630 fn = built_in_decls[BUILT_IN_EXPECT];
6631 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6632 ret_type = TREE_TYPE (TREE_TYPE (fn));
6633 pred_type = TREE_VALUE (arg_types);
6634 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6636 pred = fold_convert_loc (loc, pred_type, pred);
6637 expected = fold_convert_loc (loc, expected_type, expected);
6638 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6640 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6641 build_int_cst (ret_type, 0));
6644 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6645 NULL_TREE if no simplification is possible. */
6648 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6651 enum tree_code code;
6653 /* If this is a builtin_expect within a builtin_expect keep the
6654 inner one. See through a comparison against a constant. It
6655 might have been added to create a thruthvalue. */
6657 if (COMPARISON_CLASS_P (inner)
6658 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6659 inner = TREE_OPERAND (inner, 0);
6661 if (TREE_CODE (inner) == CALL_EXPR
6662 && (fndecl = get_callee_fndecl (inner))
6663 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6664 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6667 /* Distribute the expected value over short-circuiting operators.
6668 See through the cast from truthvalue_type_node to long. */
6670 while (TREE_CODE (inner) == NOP_EXPR
6671 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6672 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6673 inner = TREE_OPERAND (inner, 0);
6675 code = TREE_CODE (inner);
6676 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6678 tree op0 = TREE_OPERAND (inner, 0);
6679 tree op1 = TREE_OPERAND (inner, 1);
6681 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6682 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6683 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6685 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6688 /* If the argument isn't invariant then there's nothing else we can do. */
6689 if (!TREE_CONSTANT (arg0))
6692 /* If we expect that a comparison against the argument will fold to
6693 a constant return the constant. In practice, this means a true
6694 constant or the address of a non-weak symbol. */
6697 if (TREE_CODE (inner) == ADDR_EXPR)
6701 inner = TREE_OPERAND (inner, 0);
6703 while (TREE_CODE (inner) == COMPONENT_REF
6704 || TREE_CODE (inner) == ARRAY_REF);
6705 if ((TREE_CODE (inner) == VAR_DECL
6706 || TREE_CODE (inner) == FUNCTION_DECL)
6707 && DECL_WEAK (inner))
6711 /* Otherwise, ARG0 already has the proper type for the return value. */
6715 /* Fold a call to __builtin_classify_type with argument ARG. */
6718 fold_builtin_classify_type (tree arg)
6721 return build_int_cst (NULL_TREE, no_type_class);
6723 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6726 /* Fold a call to __builtin_strlen with argument ARG. */
6729 fold_builtin_strlen (location_t loc, tree type, tree arg)
6731 if (!validate_arg (arg, POINTER_TYPE))
6735 tree len = c_strlen (arg, 0);
6738 return fold_convert_loc (loc, type, len);
6744 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6747 fold_builtin_inf (location_t loc, tree type, int warn)
6749 REAL_VALUE_TYPE real;
6751 /* __builtin_inff is intended to be usable to define INFINITY on all
6752 targets. If an infinity is not available, INFINITY expands "to a
6753 positive constant of type float that overflows at translation
6754 time", footnote "In this case, using INFINITY will violate the
6755 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6756 Thus we pedwarn to ensure this constraint violation is
6758 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6759 pedwarn (loc, 0, "target format does not support infinity");
6762 return build_real (type, real);
6765 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6768 fold_builtin_nan (tree arg, tree type, int quiet)
6770 REAL_VALUE_TYPE real;
6773 if (!validate_arg (arg, POINTER_TYPE))
6775 str = c_getstr (arg);
6779 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6782 return build_real (type, real);
6785 /* Return true if the floating point expression T has an integer value.
6786 We also allow +Inf, -Inf and NaN to be considered integer values. */
6789 integer_valued_real_p (tree t)
6791 switch (TREE_CODE (t))
6798 return integer_valued_real_p (TREE_OPERAND (t, 0));
6803 return integer_valued_real_p (TREE_OPERAND (t, 1));
6810 return integer_valued_real_p (TREE_OPERAND (t, 0))
6811 && integer_valued_real_p (TREE_OPERAND (t, 1));
6814 return integer_valued_real_p (TREE_OPERAND (t, 1))
6815 && integer_valued_real_p (TREE_OPERAND (t, 2));
6818 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6822 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6823 if (TREE_CODE (type) == INTEGER_TYPE)
6825 if (TREE_CODE (type) == REAL_TYPE)
6826 return integer_valued_real_p (TREE_OPERAND (t, 0));
6831 switch (builtin_mathfn_code (t))
6833 CASE_FLT_FN (BUILT_IN_CEIL):
6834 CASE_FLT_FN (BUILT_IN_FLOOR):
6835 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6836 CASE_FLT_FN (BUILT_IN_RINT):
6837 CASE_FLT_FN (BUILT_IN_ROUND):
6838 CASE_FLT_FN (BUILT_IN_TRUNC):
6841 CASE_FLT_FN (BUILT_IN_FMIN):
6842 CASE_FLT_FN (BUILT_IN_FMAX):
6843 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6844 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6857 /* FNDECL is assumed to be a builtin where truncation can be propagated
6858 across (for instance floor((double)f) == (double)floorf (f).
6859 Do the transformation for a call with argument ARG. */
6862 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6864 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6866 if (!validate_arg (arg, REAL_TYPE))
6869 /* Integer rounding functions are idempotent. */
6870 if (fcode == builtin_mathfn_code (arg))
6873 /* If argument is already integer valued, and we don't need to worry
6874 about setting errno, there's no need to perform rounding. */
6875 if (! flag_errno_math && integer_valued_real_p (arg))
6880 tree arg0 = strip_float_extensions (arg);
6881 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6882 tree newtype = TREE_TYPE (arg0);
6885 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6886 && (decl = mathfn_built_in (newtype, fcode)))
6887 return fold_convert_loc (loc, ftype,
6888 build_call_expr_loc (loc, decl, 1,
6889 fold_convert_loc (loc,
6896 /* FNDECL is assumed to be builtin which can narrow the FP type of
6897 the argument, for instance lround((double)f) -> lroundf (f).
6898 Do the transformation for a call with argument ARG. */
6901 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6903 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6905 if (!validate_arg (arg, REAL_TYPE))
6908 /* If argument is already integer valued, and we don't need to worry
6909 about setting errno, there's no need to perform rounding. */
6910 if (! flag_errno_math && integer_valued_real_p (arg))
6911 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6912 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6916 tree ftype = TREE_TYPE (arg);
6917 tree arg0 = strip_float_extensions (arg);
6918 tree newtype = TREE_TYPE (arg0);
6921 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6922 && (decl = mathfn_built_in (newtype, fcode)))
6923 return build_call_expr_loc (loc, decl, 1,
6924 fold_convert_loc (loc, newtype, arg0));
6927 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6928 sizeof (long long) == sizeof (long). */
6929 if (TYPE_PRECISION (long_long_integer_type_node)
6930 == TYPE_PRECISION (long_integer_type_node))
6932 tree newfn = NULL_TREE;
6935 CASE_FLT_FN (BUILT_IN_LLCEIL):
6936 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6939 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6940 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6943 CASE_FLT_FN (BUILT_IN_LLROUND):
6944 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6947 CASE_FLT_FN (BUILT_IN_LLRINT):
6948 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6957 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6958 return fold_convert_loc (loc,
6959 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6966 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6967 return type. Return NULL_TREE if no simplification can be made. */
6970 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6974 if (!validate_arg (arg, COMPLEX_TYPE)
6975 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6978 /* Calculate the result when the argument is a constant. */
6979 if (TREE_CODE (arg) == COMPLEX_CST
6980 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6984 if (TREE_CODE (arg) == COMPLEX_EXPR)
6986 tree real = TREE_OPERAND (arg, 0);
6987 tree imag = TREE_OPERAND (arg, 1);
6989 /* If either part is zero, cabs is fabs of the other. */
6990 if (real_zerop (real))
6991 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6992 if (real_zerop (imag))
6993 return fold_build1_loc (loc, ABS_EXPR, type, real);
6995 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6996 if (flag_unsafe_math_optimizations
6997 && operand_equal_p (real, imag, OEP_PURE_SAME))
6999 const REAL_VALUE_TYPE sqrt2_trunc
7000 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7002 return fold_build2_loc (loc, MULT_EXPR, type,
7003 fold_build1_loc (loc, ABS_EXPR, type, real),
7004 build_real (type, sqrt2_trunc));
7008 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7009 if (TREE_CODE (arg) == NEGATE_EXPR
7010 || TREE_CODE (arg) == CONJ_EXPR)
7011 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7013 /* Don't do this when optimizing for size. */
7014 if (flag_unsafe_math_optimizations
7015 && optimize && optimize_function_for_speed_p (cfun))
7017 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7019 if (sqrtfn != NULL_TREE)
7021 tree rpart, ipart, result;
7023 arg = builtin_save_expr (arg);
7025 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7026 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7028 rpart = builtin_save_expr (rpart);
7029 ipart = builtin_save_expr (ipart);
7031 result = fold_build2_loc (loc, PLUS_EXPR, type,
7032 fold_build2_loc (loc, MULT_EXPR, type,
7034 fold_build2_loc (loc, MULT_EXPR, type,
7037 return build_call_expr_loc (loc, sqrtfn, 1, result);
7044 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7045 complex tree type of the result. If NEG is true, the imaginary
7046 zero is negative. */
7049 build_complex_cproj (tree type, bool neg)
7051 REAL_VALUE_TYPE rinf, rzero = dconst0;
7055 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7056 build_real (TREE_TYPE (type), rzero));
7059 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7060 return type. Return NULL_TREE if no simplification can be made. */
7063 fold_builtin_cproj (location_t loc, tree arg, tree type)
7065 if (!validate_arg (arg, COMPLEX_TYPE)
7066 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7069 /* If there are no infinities, return arg. */
7070 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7071 return non_lvalue_loc (loc, arg);
7073 /* Calculate the result when the argument is a constant. */
7074 if (TREE_CODE (arg) == COMPLEX_CST)
7076 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7077 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7079 if (real_isinf (real) || real_isinf (imag))
7080 return build_complex_cproj (type, imag->sign);
7084 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7086 tree real = TREE_OPERAND (arg, 0);
7087 tree imag = TREE_OPERAND (arg, 1);
7092 /* If the real part is inf and the imag part is known to be
7093 nonnegative, return (inf + 0i). Remember side-effects are
7094 possible in the imag part. */
7095 if (TREE_CODE (real) == REAL_CST
7096 && real_isinf (TREE_REAL_CST_PTR (real))
7097 && tree_expr_nonnegative_p (imag))
7098 return omit_one_operand_loc (loc, type,
7099 build_complex_cproj (type, false),
7102 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7103 Remember side-effects are possible in the real part. */
7104 if (TREE_CODE (imag) == REAL_CST
7105 && real_isinf (TREE_REAL_CST_PTR (imag)))
7107 omit_one_operand_loc (loc, type,
7108 build_complex_cproj (type, TREE_REAL_CST_PTR
7109 (imag)->sign), arg);
7115 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7116 Return NULL_TREE if no simplification can be made. */
7119 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7122 enum built_in_function fcode;
7125 if (!validate_arg (arg, REAL_TYPE))
7128 /* Calculate the result when the argument is a constant. */
7129 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7132 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7133 fcode = builtin_mathfn_code (arg);
7134 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7136 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7137 arg = fold_build2_loc (loc, MULT_EXPR, type,
7138 CALL_EXPR_ARG (arg, 0),
7139 build_real (type, dconsthalf));
7140 return build_call_expr_loc (loc, expfn, 1, arg);
7143 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7144 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7146 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7150 tree arg0 = CALL_EXPR_ARG (arg, 0);
7152 /* The inner root was either sqrt or cbrt. */
7153 /* This was a conditional expression but it triggered a bug
7155 REAL_VALUE_TYPE dconstroot;
7156 if (BUILTIN_SQRT_P (fcode))
7157 dconstroot = dconsthalf;
7159 dconstroot = dconst_third ();
7161 /* Adjust for the outer root. */
7162 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7163 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7164 tree_root = build_real (type, dconstroot);
7165 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7169 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7170 if (flag_unsafe_math_optimizations
7171 && (fcode == BUILT_IN_POW
7172 || fcode == BUILT_IN_POWF
7173 || fcode == BUILT_IN_POWL))
7175 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7176 tree arg0 = CALL_EXPR_ARG (arg, 0);
7177 tree arg1 = CALL_EXPR_ARG (arg, 1);
7179 if (!tree_expr_nonnegative_p (arg0))
7180 arg0 = build1 (ABS_EXPR, type, arg0);
7181 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7182 build_real (type, dconsthalf));
7183 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7189 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7190 Return NULL_TREE if no simplification can be made. */
7193 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7195 const enum built_in_function fcode = builtin_mathfn_code (arg);
7198 if (!validate_arg (arg, REAL_TYPE))
7201 /* Calculate the result when the argument is a constant. */
7202 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7205 if (flag_unsafe_math_optimizations)
7207 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7208 if (BUILTIN_EXPONENT_P (fcode))
7210 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7211 const REAL_VALUE_TYPE third_trunc =
7212 real_value_truncate (TYPE_MODE (type), dconst_third ());
7213 arg = fold_build2_loc (loc, MULT_EXPR, type,
7214 CALL_EXPR_ARG (arg, 0),
7215 build_real (type, third_trunc));
7216 return build_call_expr_loc (loc, expfn, 1, arg);
7219 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7220 if (BUILTIN_SQRT_P (fcode))
7222 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7226 tree arg0 = CALL_EXPR_ARG (arg, 0);
7228 REAL_VALUE_TYPE dconstroot = dconst_third ();
7230 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7231 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7232 tree_root = build_real (type, dconstroot);
7233 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7237 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7238 if (BUILTIN_CBRT_P (fcode))
7240 tree arg0 = CALL_EXPR_ARG (arg, 0);
7241 if (tree_expr_nonnegative_p (arg0))
7243 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7248 REAL_VALUE_TYPE dconstroot;
7250 real_arithmetic (&dconstroot, MULT_EXPR,
7251 dconst_third_ptr (), dconst_third_ptr ());
7252 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7253 tree_root = build_real (type, dconstroot);
7254 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7259 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7260 if (fcode == BUILT_IN_POW
7261 || fcode == BUILT_IN_POWF
7262 || fcode == BUILT_IN_POWL)
7264 tree arg00 = CALL_EXPR_ARG (arg, 0);
7265 tree arg01 = CALL_EXPR_ARG (arg, 1);
7266 if (tree_expr_nonnegative_p (arg00))
7268 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7269 const REAL_VALUE_TYPE dconstroot
7270 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7271 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7272 build_real (type, dconstroot));
7273 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7280 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7281 TYPE is the type of the return value. Return NULL_TREE if no
7282 simplification can be made. */
7285 fold_builtin_cos (location_t loc,
7286 tree arg, tree type, tree fndecl)
7290 if (!validate_arg (arg, REAL_TYPE))
7293 /* Calculate the result when the argument is a constant. */
7294 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7297 /* Optimize cos(-x) into cos (x). */
7298 if ((narg = fold_strip_sign_ops (arg)))
7299 return build_call_expr_loc (loc, fndecl, 1, narg);
7304 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7305 Return NULL_TREE if no simplification can be made. */
7308 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7310 if (validate_arg (arg, REAL_TYPE))
7314 /* Calculate the result when the argument is a constant. */
7315 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7318 /* Optimize cosh(-x) into cosh (x). */
7319 if ((narg = fold_strip_sign_ops (arg)))
7320 return build_call_expr_loc (loc, fndecl, 1, narg);
7326 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7327 argument ARG. TYPE is the type of the return value. Return
7328 NULL_TREE if no simplification can be made. */
7331 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7334 if (validate_arg (arg, COMPLEX_TYPE)
7335 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7339 /* Calculate the result when the argument is a constant. */
7340 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7343 /* Optimize fn(-x) into fn(x). */
7344 if ((tmp = fold_strip_sign_ops (arg)))
7345 return build_call_expr_loc (loc, fndecl, 1, tmp);
7351 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7352 Return NULL_TREE if no simplification can be made. */
7355 fold_builtin_tan (tree arg, tree type)
7357 enum built_in_function fcode;
7360 if (!validate_arg (arg, REAL_TYPE))
7363 /* Calculate the result when the argument is a constant. */
7364 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7367 /* Optimize tan(atan(x)) = x. */
7368 fcode = builtin_mathfn_code (arg);
7369 if (flag_unsafe_math_optimizations
7370 && (fcode == BUILT_IN_ATAN
7371 || fcode == BUILT_IN_ATANF
7372 || fcode == BUILT_IN_ATANL))
7373 return CALL_EXPR_ARG (arg, 0);
7378 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7379 NULL_TREE if no simplification can be made. */
7382 fold_builtin_sincos (location_t loc,
7383 tree arg0, tree arg1, tree arg2)
7388 if (!validate_arg (arg0, REAL_TYPE)
7389 || !validate_arg (arg1, POINTER_TYPE)
7390 || !validate_arg (arg2, POINTER_TYPE))
7393 type = TREE_TYPE (arg0);
7395 /* Calculate the result when the argument is a constant. */
7396 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7399 /* Canonicalize sincos to cexpi. */
7400 if (!TARGET_C99_FUNCTIONS)
7402 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7406 call = build_call_expr_loc (loc, fn, 1, arg0);
7407 call = builtin_save_expr (call);
7409 return build2 (COMPOUND_EXPR, void_type_node,
7410 build2 (MODIFY_EXPR, void_type_node,
7411 build_fold_indirect_ref_loc (loc, arg1),
7412 build1 (IMAGPART_EXPR, type, call)),
7413 build2 (MODIFY_EXPR, void_type_node,
7414 build_fold_indirect_ref_loc (loc, arg2),
7415 build1 (REALPART_EXPR, type, call)));
7418 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7419 NULL_TREE if no simplification can be made. */
7422 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7425 tree realp, imagp, ifn;
7428 if (!validate_arg (arg0, COMPLEX_TYPE)
7429 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7432 /* Calculate the result when the argument is a constant. */
7433 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7436 rtype = TREE_TYPE (TREE_TYPE (arg0));
7438 /* In case we can figure out the real part of arg0 and it is constant zero
7440 if (!TARGET_C99_FUNCTIONS)
7442 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7446 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7447 && real_zerop (realp))
7449 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7450 return build_call_expr_loc (loc, ifn, 1, narg);
7453 /* In case we can easily decompose real and imaginary parts split cexp
7454 to exp (r) * cexpi (i). */
7455 if (flag_unsafe_math_optimizations
7458 tree rfn, rcall, icall;
7460 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7464 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7468 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7469 icall = builtin_save_expr (icall);
7470 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7471 rcall = builtin_save_expr (rcall);
7472 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7473 fold_build2_loc (loc, MULT_EXPR, rtype,
7475 fold_build1_loc (loc, REALPART_EXPR,
7477 fold_build2_loc (loc, MULT_EXPR, rtype,
7479 fold_build1_loc (loc, IMAGPART_EXPR,
7486 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7487 Return NULL_TREE if no simplification can be made. */
7490 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7492 if (!validate_arg (arg, REAL_TYPE))
7495 /* Optimize trunc of constant value. */
7496 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7498 REAL_VALUE_TYPE r, x;
7499 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7501 x = TREE_REAL_CST (arg);
7502 real_trunc (&r, TYPE_MODE (type), &x);
7503 return build_real (type, r);
7506 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7509 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7510 Return NULL_TREE if no simplification can be made. */
7513 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7515 if (!validate_arg (arg, REAL_TYPE))
7518 /* Optimize floor of constant value. */
7519 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7523 x = TREE_REAL_CST (arg);
7524 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7526 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7529 real_floor (&r, TYPE_MODE (type), &x);
7530 return build_real (type, r);
7534 /* Fold floor (x) where x is nonnegative to trunc (x). */
7535 if (tree_expr_nonnegative_p (arg))
7537 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7539 return build_call_expr_loc (loc, truncfn, 1, arg);
7542 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7545 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7546 Return NULL_TREE if no simplification can be made. */
7549 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7551 if (!validate_arg (arg, REAL_TYPE))
7554 /* Optimize ceil of constant value. */
7555 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7559 x = TREE_REAL_CST (arg);
7560 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7562 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7565 real_ceil (&r, TYPE_MODE (type), &x);
7566 return build_real (type, r);
7570 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7573 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7574 Return NULL_TREE if no simplification can be made. */
7577 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7579 if (!validate_arg (arg, REAL_TYPE))
7582 /* Optimize round of constant value. */
7583 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7587 x = TREE_REAL_CST (arg);
7588 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7590 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7593 real_round (&r, TYPE_MODE (type), &x);
7594 return build_real (type, r);
7598 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7601 /* Fold function call to builtin lround, lroundf or lroundl (or the
7602 corresponding long long versions) and other rounding functions. ARG
7603 is the argument to the call. Return NULL_TREE if no simplification
7607 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7609 if (!validate_arg (arg, REAL_TYPE))
7612 /* Optimize lround of constant value. */
7613 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7615 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7617 if (real_isfinite (&x))
7619 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7620 tree ftype = TREE_TYPE (arg);
7621 unsigned HOST_WIDE_INT lo2;
7622 HOST_WIDE_INT hi, lo;
7625 switch (DECL_FUNCTION_CODE (fndecl))
7627 CASE_FLT_FN (BUILT_IN_LFLOOR):
7628 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7629 real_floor (&r, TYPE_MODE (ftype), &x);
7632 CASE_FLT_FN (BUILT_IN_LCEIL):
7633 CASE_FLT_FN (BUILT_IN_LLCEIL):
7634 real_ceil (&r, TYPE_MODE (ftype), &x);
7637 CASE_FLT_FN (BUILT_IN_LROUND):
7638 CASE_FLT_FN (BUILT_IN_LLROUND):
7639 real_round (&r, TYPE_MODE (ftype), &x);
7646 REAL_VALUE_TO_INT (&lo, &hi, r);
7647 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7648 return build_int_cst_wide (itype, lo2, hi);
7652 switch (DECL_FUNCTION_CODE (fndecl))
7654 CASE_FLT_FN (BUILT_IN_LFLOOR):
7655 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7656 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7657 if (tree_expr_nonnegative_p (arg))
7658 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7659 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7664 return fold_fixed_mathfn (loc, fndecl, arg);
7667 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7668 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7669 the argument to the call. Return NULL_TREE if no simplification can
7673 fold_builtin_bitop (tree fndecl, tree arg)
7675 if (!validate_arg (arg, INTEGER_TYPE))
7678 /* Optimize for constant argument. */
7679 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7681 HOST_WIDE_INT hi, width, result;
7682 unsigned HOST_WIDE_INT lo;
7685 type = TREE_TYPE (arg);
7686 width = TYPE_PRECISION (type);
7687 lo = TREE_INT_CST_LOW (arg);
7689 /* Clear all the bits that are beyond the type's precision. */
7690 if (width > HOST_BITS_PER_WIDE_INT)
7692 hi = TREE_INT_CST_HIGH (arg);
7693 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7694 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7699 if (width < HOST_BITS_PER_WIDE_INT)
7700 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7703 switch (DECL_FUNCTION_CODE (fndecl))
7705 CASE_INT_FN (BUILT_IN_FFS):
7707 result = exact_log2 (lo & -lo) + 1;
7709 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7714 CASE_INT_FN (BUILT_IN_CLZ):
7716 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7718 result = width - floor_log2 (lo) - 1;
7719 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7723 CASE_INT_FN (BUILT_IN_CTZ):
7725 result = exact_log2 (lo & -lo);
7727 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7728 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7732 CASE_INT_FN (BUILT_IN_POPCOUNT):
7735 result++, lo &= lo - 1;
7737 result++, hi &= hi - 1;
7740 CASE_INT_FN (BUILT_IN_PARITY):
7743 result++, lo &= lo - 1;
7745 result++, hi &= hi - 1;
7753 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7759 /* Fold function call to builtin_bswap and the long and long long
7760 variants. Return NULL_TREE if no simplification can be made. */
7762 fold_builtin_bswap (tree fndecl, tree arg)
7764 if (! validate_arg (arg, INTEGER_TYPE))
7767 /* Optimize constant value. */
7768 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7770 HOST_WIDE_INT hi, width, r_hi = 0;
7771 unsigned HOST_WIDE_INT lo, r_lo = 0;
7774 type = TREE_TYPE (arg);
7775 width = TYPE_PRECISION (type);
7776 lo = TREE_INT_CST_LOW (arg);
7777 hi = TREE_INT_CST_HIGH (arg);
7779 switch (DECL_FUNCTION_CODE (fndecl))
7781 case BUILT_IN_BSWAP32:
7782 case BUILT_IN_BSWAP64:
7786 for (s = 0; s < width; s += 8)
7788 int d = width - s - 8;
7789 unsigned HOST_WIDE_INT byte;
7791 if (s < HOST_BITS_PER_WIDE_INT)
7792 byte = (lo >> s) & 0xff;
7794 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7796 if (d < HOST_BITS_PER_WIDE_INT)
7799 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7809 if (width < HOST_BITS_PER_WIDE_INT)
7810 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7812 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7818 /* A subroutine of fold_builtin to fold the various logarithmic
7819 functions. Return NULL_TREE if no simplification can me made.
7820 FUNC is the corresponding MPFR logarithm function. */
7823 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7824 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7826 if (validate_arg (arg, REAL_TYPE))
7828 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7830 const enum built_in_function fcode = builtin_mathfn_code (arg);
7832 /* Calculate the result when the argument is a constant. */
7833 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7836 /* Special case, optimize logN(expN(x)) = x. */
7837 if (flag_unsafe_math_optimizations
7838 && ((func == mpfr_log
7839 && (fcode == BUILT_IN_EXP
7840 || fcode == BUILT_IN_EXPF
7841 || fcode == BUILT_IN_EXPL))
7842 || (func == mpfr_log2
7843 && (fcode == BUILT_IN_EXP2
7844 || fcode == BUILT_IN_EXP2F
7845 || fcode == BUILT_IN_EXP2L))
7846 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7847 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7849 /* Optimize logN(func()) for various exponential functions. We
7850 want to determine the value "x" and the power "exponent" in
7851 order to transform logN(x**exponent) into exponent*logN(x). */
7852 if (flag_unsafe_math_optimizations)
7854 tree exponent = 0, x = 0;
7858 CASE_FLT_FN (BUILT_IN_EXP):
7859 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7860 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7862 exponent = CALL_EXPR_ARG (arg, 0);
7864 CASE_FLT_FN (BUILT_IN_EXP2):
7865 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7866 x = build_real (type, dconst2);
7867 exponent = CALL_EXPR_ARG (arg, 0);
7869 CASE_FLT_FN (BUILT_IN_EXP10):
7870 CASE_FLT_FN (BUILT_IN_POW10):
7871 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7873 REAL_VALUE_TYPE dconst10;
7874 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7875 x = build_real (type, dconst10);
7877 exponent = CALL_EXPR_ARG (arg, 0);
7879 CASE_FLT_FN (BUILT_IN_SQRT):
7880 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7881 x = CALL_EXPR_ARG (arg, 0);
7882 exponent = build_real (type, dconsthalf);
7884 CASE_FLT_FN (BUILT_IN_CBRT):
7885 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7886 x = CALL_EXPR_ARG (arg, 0);
7887 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7890 CASE_FLT_FN (BUILT_IN_POW):
7891 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7892 x = CALL_EXPR_ARG (arg, 0);
7893 exponent = CALL_EXPR_ARG (arg, 1);
7899 /* Now perform the optimization. */
7902 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7903 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7911 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7912 NULL_TREE if no simplification can be made. */
7915 fold_builtin_hypot (location_t loc, tree fndecl,
7916 tree arg0, tree arg1, tree type)
7918 tree res, narg0, narg1;
7920 if (!validate_arg (arg0, REAL_TYPE)
7921 || !validate_arg (arg1, REAL_TYPE))
7924 /* Calculate the result when the argument is a constant. */
7925 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7928 /* If either argument to hypot has a negate or abs, strip that off.
7929 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7930 narg0 = fold_strip_sign_ops (arg0);
7931 narg1 = fold_strip_sign_ops (arg1);
7934 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7935 narg1 ? narg1 : arg1);
7938 /* If either argument is zero, hypot is fabs of the other. */
7939 if (real_zerop (arg0))
7940 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7941 else if (real_zerop (arg1))
7942 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7944 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7945 if (flag_unsafe_math_optimizations
7946 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7948 const REAL_VALUE_TYPE sqrt2_trunc
7949 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7950 return fold_build2_loc (loc, MULT_EXPR, type,
7951 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7952 build_real (type, sqrt2_trunc));
7959 /* Fold a builtin function call to pow, powf, or powl. Return
7960 NULL_TREE if no simplification can be made. */
7962 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7966 if (!validate_arg (arg0, REAL_TYPE)
7967 || !validate_arg (arg1, REAL_TYPE))
7970 /* Calculate the result when the argument is a constant. */
7971 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7974 /* Optimize pow(1.0,y) = 1.0. */
7975 if (real_onep (arg0))
7976 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7978 if (TREE_CODE (arg1) == REAL_CST
7979 && !TREE_OVERFLOW (arg1))
7981 REAL_VALUE_TYPE cint;
7985 c = TREE_REAL_CST (arg1);
7987 /* Optimize pow(x,0.0) = 1.0. */
7988 if (REAL_VALUES_EQUAL (c, dconst0))
7989 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7992 /* Optimize pow(x,1.0) = x. */
7993 if (REAL_VALUES_EQUAL (c, dconst1))
7996 /* Optimize pow(x,-1.0) = 1.0/x. */
7997 if (REAL_VALUES_EQUAL (c, dconstm1))
7998 return fold_build2_loc (loc, RDIV_EXPR, type,
7999 build_real (type, dconst1), arg0);
8001 /* Optimize pow(x,0.5) = sqrt(x). */
8002 if (flag_unsafe_math_optimizations
8003 && REAL_VALUES_EQUAL (c, dconsthalf))
8005 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8007 if (sqrtfn != NULL_TREE)
8008 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8011 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8012 if (flag_unsafe_math_optimizations)
8014 const REAL_VALUE_TYPE dconstroot
8015 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8017 if (REAL_VALUES_EQUAL (c, dconstroot))
8019 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8020 if (cbrtfn != NULL_TREE)
8021 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8025 /* Check for an integer exponent. */
8026 n = real_to_integer (&c);
8027 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8028 if (real_identical (&c, &cint))
8030 /* Attempt to evaluate pow at compile-time, unless this should
8031 raise an exception. */
8032 if (TREE_CODE (arg0) == REAL_CST
8033 && !TREE_OVERFLOW (arg0)
8035 || (!flag_trapping_math && !flag_errno_math)
8036 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8041 x = TREE_REAL_CST (arg0);
8042 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8043 if (flag_unsafe_math_optimizations || !inexact)
8044 return build_real (type, x);
8047 /* Strip sign ops from even integer powers. */
8048 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8050 tree narg0 = fold_strip_sign_ops (arg0);
8052 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8057 if (flag_unsafe_math_optimizations)
8059 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8061 /* Optimize pow(expN(x),y) = expN(x*y). */
8062 if (BUILTIN_EXPONENT_P (fcode))
8064 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8065 tree arg = CALL_EXPR_ARG (arg0, 0);
8066 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8067 return build_call_expr_loc (loc, expfn, 1, arg);
8070 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8071 if (BUILTIN_SQRT_P (fcode))
8073 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8074 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8075 build_real (type, dconsthalf));
8076 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8079 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8080 if (BUILTIN_CBRT_P (fcode))
8082 tree arg = CALL_EXPR_ARG (arg0, 0);
8083 if (tree_expr_nonnegative_p (arg))
8085 const REAL_VALUE_TYPE dconstroot
8086 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8087 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8088 build_real (type, dconstroot));
8089 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8093 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8094 if (fcode == BUILT_IN_POW
8095 || fcode == BUILT_IN_POWF
8096 || fcode == BUILT_IN_POWL)
8098 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8099 if (tree_expr_nonnegative_p (arg00))
8101 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8102 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8103 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8111 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8112 Return NULL_TREE if no simplification can be made. */
8114 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8115 tree arg0, tree arg1, tree type)
8117 if (!validate_arg (arg0, REAL_TYPE)
8118 || !validate_arg (arg1, INTEGER_TYPE))
8121 /* Optimize pow(1.0,y) = 1.0. */
8122 if (real_onep (arg0))
8123 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8125 if (host_integerp (arg1, 0))
8127 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8129 /* Evaluate powi at compile-time. */
8130 if (TREE_CODE (arg0) == REAL_CST
8131 && !TREE_OVERFLOW (arg0))
8134 x = TREE_REAL_CST (arg0);
8135 real_powi (&x, TYPE_MODE (type), &x, c);
8136 return build_real (type, x);
8139 /* Optimize pow(x,0) = 1.0. */
8141 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8144 /* Optimize pow(x,1) = x. */
8148 /* Optimize pow(x,-1) = 1.0/x. */
8150 return fold_build2_loc (loc, RDIV_EXPR, type,
8151 build_real (type, dconst1), arg0);
8157 /* A subroutine of fold_builtin to fold the various exponent
8158 functions. Return NULL_TREE if no simplification can be made.
8159 FUNC is the corresponding MPFR exponent function. */
8162 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8163 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8165 if (validate_arg (arg, REAL_TYPE))
8167 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8170 /* Calculate the result when the argument is a constant. */
8171 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8174 /* Optimize expN(logN(x)) = x. */
8175 if (flag_unsafe_math_optimizations)
8177 const enum built_in_function fcode = builtin_mathfn_code (arg);
8179 if ((func == mpfr_exp
8180 && (fcode == BUILT_IN_LOG
8181 || fcode == BUILT_IN_LOGF
8182 || fcode == BUILT_IN_LOGL))
8183 || (func == mpfr_exp2
8184 && (fcode == BUILT_IN_LOG2
8185 || fcode == BUILT_IN_LOG2F
8186 || fcode == BUILT_IN_LOG2L))
8187 || (func == mpfr_exp10
8188 && (fcode == BUILT_IN_LOG10
8189 || fcode == BUILT_IN_LOG10F
8190 || fcode == BUILT_IN_LOG10L)))
8191 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8198 /* Return true if VAR is a VAR_DECL or a component thereof. */
8201 var_decl_component_p (tree var)
8204 while (handled_component_p (inner))
8205 inner = TREE_OPERAND (inner, 0);
8206 return SSA_VAR_P (inner);
8209 /* Fold function call to builtin memset. Return
8210 NULL_TREE if no simplification can be made. */
8213 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8214 tree type, bool ignore)
8216 tree var, ret, etype;
8217 unsigned HOST_WIDE_INT length, cval;
8219 if (! validate_arg (dest, POINTER_TYPE)
8220 || ! validate_arg (c, INTEGER_TYPE)
8221 || ! validate_arg (len, INTEGER_TYPE))
8224 if (! host_integerp (len, 1))
8227 /* If the LEN parameter is zero, return DEST. */
8228 if (integer_zerop (len))
8229 return omit_one_operand_loc (loc, type, dest, c);
8231 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8236 if (TREE_CODE (var) != ADDR_EXPR)
8239 var = TREE_OPERAND (var, 0);
8240 if (TREE_THIS_VOLATILE (var))
8243 etype = TREE_TYPE (var);
8244 if (TREE_CODE (etype) == ARRAY_TYPE)
8245 etype = TREE_TYPE (etype);
8247 if (!INTEGRAL_TYPE_P (etype)
8248 && !POINTER_TYPE_P (etype))
8251 if (! var_decl_component_p (var))
8254 length = tree_low_cst (len, 1);
8255 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8256 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8260 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8263 if (integer_zerop (c))
8267 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8270 cval = tree_low_cst (c, 1);
8274 cval |= (cval << 31) << 1;
8277 ret = build_int_cst_type (etype, cval);
8278 var = build_fold_indirect_ref_loc (loc,
8279 fold_convert_loc (loc,
8280 build_pointer_type (etype),
8282 ret = build2 (MODIFY_EXPR, etype, var, ret);
8286 return omit_one_operand_loc (loc, type, dest, ret);
8289 /* Fold function call to builtin memset. Return
8290 NULL_TREE if no simplification can be made. */
8293 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8295 if (! validate_arg (dest, POINTER_TYPE)
8296 || ! validate_arg (size, INTEGER_TYPE))
8302 /* New argument list transforming bzero(ptr x, int y) to
8303 memset(ptr x, int 0, size_t y). This is done this way
8304 so that if it isn't expanded inline, we fallback to
8305 calling bzero instead of memset. */
8307 return fold_builtin_memset (loc, dest, integer_zero_node,
8308 fold_convert_loc (loc, sizetype, size),
8309 void_type_node, ignore);
8312 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8313 NULL_TREE if no simplification can be made.
8314 If ENDP is 0, return DEST (like memcpy).
8315 If ENDP is 1, return DEST+LEN (like mempcpy).
8316 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8317 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8321 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8322 tree len, tree type, bool ignore, int endp)
8324 tree destvar, srcvar, expr;
8326 if (! validate_arg (dest, POINTER_TYPE)
8327 || ! validate_arg (src, POINTER_TYPE)
8328 || ! validate_arg (len, INTEGER_TYPE))
8331 /* If the LEN parameter is zero, return DEST. */
8332 if (integer_zerop (len))
8333 return omit_one_operand_loc (loc, type, dest, src);
8335 /* If SRC and DEST are the same (and not volatile), return
8336 DEST{,+LEN,+LEN-1}. */
8337 if (operand_equal_p (src, dest, 0))
8341 tree srctype, desttype;
8342 int src_align, dest_align;
8346 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8347 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8349 /* Both DEST and SRC must be pointer types.
8350 ??? This is what old code did. Is the testing for pointer types
8353 If either SRC is readonly or length is 1, we can use memcpy. */
8354 if (!dest_align || !src_align)
8356 if (readonly_data_expr (src)
8357 || (host_integerp (len, 1)
8358 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8359 >= tree_low_cst (len, 1))))
8361 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8364 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8367 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8368 srcvar = build_fold_indirect_ref_loc (loc, src);
8369 destvar = build_fold_indirect_ref_loc (loc, dest);
8371 && !TREE_THIS_VOLATILE (srcvar)
8373 && !TREE_THIS_VOLATILE (destvar))
8375 tree src_base, dest_base, fn;
8376 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8377 HOST_WIDE_INT size = -1;
8378 HOST_WIDE_INT maxsize = -1;
8381 if (handled_component_p (src_base))
8382 src_base = get_ref_base_and_extent (src_base, &src_offset,
8384 dest_base = destvar;
8385 if (handled_component_p (dest_base))
8386 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8388 if (host_integerp (len, 1))
8390 maxsize = tree_low_cst (len, 1);
8392 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8395 maxsize *= BITS_PER_UNIT;
8399 if (SSA_VAR_P (src_base)
8400 && SSA_VAR_P (dest_base))
8402 if (operand_equal_p (src_base, dest_base, 0)
8403 && ranges_overlap_p (src_offset, maxsize,
8404 dest_offset, maxsize))
8407 else if (TREE_CODE (src_base) == INDIRECT_REF
8408 && TREE_CODE (dest_base) == INDIRECT_REF)
8410 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8411 TREE_OPERAND (dest_base, 0), 0)
8412 || ranges_overlap_p (src_offset, maxsize,
8413 dest_offset, maxsize))
8419 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8422 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8427 if (!host_integerp (len, 0))
8430 This logic lose for arguments like (type *)malloc (sizeof (type)),
8431 since we strip the casts of up to VOID return value from malloc.
8432 Perhaps we ought to inherit type from non-VOID argument here? */
8435 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8436 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8438 tree tem = TREE_OPERAND (src, 0);
8440 if (tem != TREE_OPERAND (src, 0))
8441 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8443 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8445 tree tem = TREE_OPERAND (dest, 0);
8447 if (tem != TREE_OPERAND (dest, 0))
8448 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8450 srctype = TREE_TYPE (TREE_TYPE (src));
8452 && TREE_CODE (srctype) == ARRAY_TYPE
8453 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8455 srctype = TREE_TYPE (srctype);
8457 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8459 desttype = TREE_TYPE (TREE_TYPE (dest));
8461 && TREE_CODE (desttype) == ARRAY_TYPE
8462 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8464 desttype = TREE_TYPE (desttype);
8466 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8468 if (!srctype || !desttype
8469 || !TYPE_SIZE_UNIT (srctype)
8470 || !TYPE_SIZE_UNIT (desttype)
8471 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8472 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8473 || TYPE_VOLATILE (srctype)
8474 || TYPE_VOLATILE (desttype))
8477 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8478 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8479 if (dest_align < (int) TYPE_ALIGN (desttype)
8480 || src_align < (int) TYPE_ALIGN (srctype))
8484 dest = builtin_save_expr (dest);
8487 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8489 srcvar = build_fold_indirect_ref_loc (loc, src);
8490 if (TREE_THIS_VOLATILE (srcvar))
8492 else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
8494 /* With memcpy, it is possible to bypass aliasing rules, so without
8495 this check i.e. execute/20060930-2.c would be misoptimized,
8496 because it use conflicting alias set to hold argument for the
8497 memcpy call. This check is probably unnecessary with
8498 -fno-strict-aliasing. Similarly for destvar. See also
8500 else if (!var_decl_component_p (srcvar))
8504 destvar = NULL_TREE;
8505 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8507 destvar = build_fold_indirect_ref_loc (loc, dest);
8508 if (TREE_THIS_VOLATILE (destvar))
8510 else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
8511 destvar = NULL_TREE;
8512 else if (!var_decl_component_p (destvar))
8513 destvar = NULL_TREE;
8516 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8519 if (srcvar == NULL_TREE)
8522 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8525 srctype = build_qualified_type (desttype, 0);
8526 if (src_align < (int) TYPE_ALIGN (srctype))
8528 if (AGGREGATE_TYPE_P (srctype)
8529 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8532 srctype = build_variant_type_copy (srctype);
8533 TYPE_ALIGN (srctype) = src_align;
8534 TYPE_USER_ALIGN (srctype) = 1;
8535 TYPE_PACKED (srctype) = 1;
8537 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8538 src = fold_convert_loc (loc, srcptype, src);
8539 srcvar = build_fold_indirect_ref_loc (loc, src);
8541 else if (destvar == NULL_TREE)
8544 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8547 desttype = build_qualified_type (srctype, 0);
8548 if (dest_align < (int) TYPE_ALIGN (desttype))
8550 if (AGGREGATE_TYPE_P (desttype)
8551 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8554 desttype = build_variant_type_copy (desttype);
8555 TYPE_ALIGN (desttype) = dest_align;
8556 TYPE_USER_ALIGN (desttype) = 1;
8557 TYPE_PACKED (desttype) = 1;
8559 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8560 dest = fold_convert_loc (loc, destptype, dest);
8561 destvar = build_fold_indirect_ref_loc (loc, dest);
8564 if (srctype == desttype
8565 || (gimple_in_ssa_p (cfun)
8566 && useless_type_conversion_p (desttype, srctype)))
8568 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8569 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8570 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8571 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8572 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
8574 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8575 TREE_TYPE (destvar), srcvar);
8576 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8582 if (endp == 0 || endp == 3)
8583 return omit_one_operand_loc (loc, type, dest, expr);
8589 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8592 len = fold_convert_loc (loc, sizetype, len);
8593 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8594 dest = fold_convert_loc (loc, type, dest);
8596 dest = omit_one_operand_loc (loc, type, dest, expr);
8600 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8601 If LEN is not NULL, it represents the length of the string to be
8602 copied. Return NULL_TREE if no simplification can be made. */
8605 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8609 if (!validate_arg (dest, POINTER_TYPE)
8610 || !validate_arg (src, POINTER_TYPE))
8613 /* If SRC and DEST are the same (and not volatile), return DEST. */
8614 if (operand_equal_p (src, dest, 0))
8615 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8617 if (optimize_function_for_size_p (cfun))
8620 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8626 len = c_strlen (src, 1);
8627 if (! len || TREE_SIDE_EFFECTS (len))
8631 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8632 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8633 build_call_expr_loc (loc, fn, 3, dest, src, len));
8636 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8637 Return NULL_TREE if no simplification can be made. */
8640 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8642 tree fn, len, lenp1, call, type;
8644 if (!validate_arg (dest, POINTER_TYPE)
8645 || !validate_arg (src, POINTER_TYPE))
8648 len = c_strlen (src, 1);
8650 || TREE_CODE (len) != INTEGER_CST)
8653 if (optimize_function_for_size_p (cfun)
8654 /* If length is zero it's small enough. */
8655 && !integer_zerop (len))
8658 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8662 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8663 /* We use dest twice in building our expression. Save it from
8664 multiple expansions. */
8665 dest = builtin_save_expr (dest);
8666 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8668 type = TREE_TYPE (TREE_TYPE (fndecl));
8669 len = fold_convert_loc (loc, sizetype, len);
8670 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8671 dest = fold_convert_loc (loc, type, dest);
8672 dest = omit_one_operand_loc (loc, type, dest, call);
8676 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8677 If SLEN is not NULL, it represents the length of the source string.
8678 Return NULL_TREE if no simplification can be made. */
8681 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8682 tree src, tree len, tree slen)
8686 if (!validate_arg (dest, POINTER_TYPE)
8687 || !validate_arg (src, POINTER_TYPE)
8688 || !validate_arg (len, INTEGER_TYPE))
8691 /* If the LEN parameter is zero, return DEST. */
8692 if (integer_zerop (len))
8693 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8695 /* We can't compare slen with len as constants below if len is not a
8697 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8701 slen = c_strlen (src, 1);
8703 /* Now, we must be passed a constant src ptr parameter. */
8704 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8707 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8709 /* We do not support simplification of this case, though we do
8710 support it when expanding trees into RTL. */
8711 /* FIXME: generate a call to __builtin_memset. */
8712 if (tree_int_cst_lt (slen, len))
8715 /* OK transform into builtin memcpy. */
8716 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8719 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8720 build_call_expr_loc (loc, fn, 3, dest, src, len));
8723 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8724 arguments to the call, and TYPE is its return type.
8725 Return NULL_TREE if no simplification can be made. */
8728 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8730 if (!validate_arg (arg1, POINTER_TYPE)
8731 || !validate_arg (arg2, INTEGER_TYPE)
8732 || !validate_arg (len, INTEGER_TYPE))
8738 if (TREE_CODE (arg2) != INTEGER_CST
8739 || !host_integerp (len, 1))
8742 p1 = c_getstr (arg1);
8743 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8749 if (target_char_cast (arg2, &c))
8752 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8755 return build_int_cst (TREE_TYPE (arg1), 0);
8757 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8759 return fold_convert_loc (loc, type, tem);
8765 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8766 Return NULL_TREE if no simplification can be made. */
8769 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8771 const char *p1, *p2;
8773 if (!validate_arg (arg1, POINTER_TYPE)
8774 || !validate_arg (arg2, POINTER_TYPE)
8775 || !validate_arg (len, INTEGER_TYPE))
8778 /* If the LEN parameter is zero, return zero. */
8779 if (integer_zerop (len))
8780 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8783 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8784 if (operand_equal_p (arg1, arg2, 0))
8785 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8787 p1 = c_getstr (arg1);
8788 p2 = c_getstr (arg2);
8790 /* If all arguments are constant, and the value of len is not greater
8791 than the lengths of arg1 and arg2, evaluate at compile-time. */
8792 if (host_integerp (len, 1) && p1 && p2
8793 && compare_tree_int (len, strlen (p1) + 1) <= 0
8794 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8796 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8799 return integer_one_node;
8801 return integer_minus_one_node;
8803 return integer_zero_node;
8806 /* If len parameter is one, return an expression corresponding to
8807 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8808 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8810 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8811 tree cst_uchar_ptr_node
8812 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8815 = fold_convert_loc (loc, integer_type_node,
8816 build1 (INDIRECT_REF, cst_uchar_node,
8817 fold_convert_loc (loc,
8821 = fold_convert_loc (loc, integer_type_node,
8822 build1 (INDIRECT_REF, cst_uchar_node,
8823 fold_convert_loc (loc,
8826 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8832 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8833 Return NULL_TREE if no simplification can be made. */
8836 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8838 const char *p1, *p2;
8840 if (!validate_arg (arg1, POINTER_TYPE)
8841 || !validate_arg (arg2, POINTER_TYPE))
8844 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8845 if (operand_equal_p (arg1, arg2, 0))
8846 return integer_zero_node;
8848 p1 = c_getstr (arg1);
8849 p2 = c_getstr (arg2);
8853 const int i = strcmp (p1, p2);
8855 return integer_minus_one_node;
8857 return integer_one_node;
8859 return integer_zero_node;
8862 /* If the second arg is "", return *(const unsigned char*)arg1. */
8863 if (p2 && *p2 == '\0')
8865 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8866 tree cst_uchar_ptr_node
8867 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8869 return fold_convert_loc (loc, integer_type_node,
8870 build1 (INDIRECT_REF, cst_uchar_node,
8871 fold_convert_loc (loc,
8876 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8877 if (p1 && *p1 == '\0')
8879 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8880 tree cst_uchar_ptr_node
8881 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8884 = fold_convert_loc (loc, integer_type_node,
8885 build1 (INDIRECT_REF, cst_uchar_node,
8886 fold_convert_loc (loc,
8889 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8895 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8896 Return NULL_TREE if no simplification can be made. */
8899 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8901 const char *p1, *p2;
8903 if (!validate_arg (arg1, POINTER_TYPE)
8904 || !validate_arg (arg2, POINTER_TYPE)
8905 || !validate_arg (len, INTEGER_TYPE))
8908 /* If the LEN parameter is zero, return zero. */
8909 if (integer_zerop (len))
8910 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8913 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8914 if (operand_equal_p (arg1, arg2, 0))
8915 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8917 p1 = c_getstr (arg1);
8918 p2 = c_getstr (arg2);
8920 if (host_integerp (len, 1) && p1 && p2)
8922 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8924 return integer_one_node;
8926 return integer_minus_one_node;
8928 return integer_zero_node;
8931 /* If the second arg is "", and the length is greater than zero,
8932 return *(const unsigned char*)arg1. */
8933 if (p2 && *p2 == '\0'
8934 && TREE_CODE (len) == INTEGER_CST
8935 && tree_int_cst_sgn (len) == 1)
8937 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8938 tree cst_uchar_ptr_node
8939 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8941 return fold_convert_loc (loc, integer_type_node,
8942 build1 (INDIRECT_REF, cst_uchar_node,
8943 fold_convert_loc (loc,
8948 /* If the first arg is "", and the length is greater than zero,
8949 return -*(const unsigned char*)arg2. */
8950 if (p1 && *p1 == '\0'
8951 && TREE_CODE (len) == INTEGER_CST
8952 && tree_int_cst_sgn (len) == 1)
8954 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8955 tree cst_uchar_ptr_node
8956 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8958 tree temp = fold_convert_loc (loc, integer_type_node,
8959 build1 (INDIRECT_REF, cst_uchar_node,
8960 fold_convert_loc (loc,
8963 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8966 /* If len parameter is one, return an expression corresponding to
8967 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8968 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8970 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8971 tree cst_uchar_ptr_node
8972 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8974 tree ind1 = fold_convert_loc (loc, integer_type_node,
8975 build1 (INDIRECT_REF, cst_uchar_node,
8976 fold_convert_loc (loc,
8979 tree ind2 = fold_convert_loc (loc, integer_type_node,
8980 build1 (INDIRECT_REF, cst_uchar_node,
8981 fold_convert_loc (loc,
8984 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8990 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8991 ARG. Return NULL_TREE if no simplification can be made. */
8994 fold_builtin_signbit (location_t loc, tree arg, tree type)
8998 if (!validate_arg (arg, REAL_TYPE))
9001 /* If ARG is a compile-time constant, determine the result. */
9002 if (TREE_CODE (arg) == REAL_CST
9003 && !TREE_OVERFLOW (arg))
9007 c = TREE_REAL_CST (arg);
9008 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9009 return fold_convert_loc (loc, type, temp);
9012 /* If ARG is non-negative, the result is always zero. */
9013 if (tree_expr_nonnegative_p (arg))
9014 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9016 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9017 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9018 return fold_build2_loc (loc, LT_EXPR, type, arg,
9019 build_real (TREE_TYPE (arg), dconst0));
9024 /* Fold function call to builtin copysign, copysignf or copysignl with
9025 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9029 fold_builtin_copysign (location_t loc, tree fndecl,
9030 tree arg1, tree arg2, tree type)
9034 if (!validate_arg (arg1, REAL_TYPE)
9035 || !validate_arg (arg2, REAL_TYPE))
9038 /* copysign(X,X) is X. */
9039 if (operand_equal_p (arg1, arg2, 0))
9040 return fold_convert_loc (loc, type, arg1);
9042 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9043 if (TREE_CODE (arg1) == REAL_CST
9044 && TREE_CODE (arg2) == REAL_CST
9045 && !TREE_OVERFLOW (arg1)
9046 && !TREE_OVERFLOW (arg2))
9048 REAL_VALUE_TYPE c1, c2;
9050 c1 = TREE_REAL_CST (arg1);
9051 c2 = TREE_REAL_CST (arg2);
9052 /* c1.sign := c2.sign. */
9053 real_copysign (&c1, &c2);
9054 return build_real (type, c1);
9057 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9058 Remember to evaluate Y for side-effects. */
9059 if (tree_expr_nonnegative_p (arg2))
9060 return omit_one_operand_loc (loc, type,
9061 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9064 /* Strip sign changing operations for the first argument. */
9065 tem = fold_strip_sign_ops (arg1);
9067 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9072 /* Fold a call to builtin isascii with argument ARG. */
9075 fold_builtin_isascii (location_t loc, tree arg)
9077 if (!validate_arg (arg, INTEGER_TYPE))
9081 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9082 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9083 build_int_cst (NULL_TREE,
9084 ~ (unsigned HOST_WIDE_INT) 0x7f));
9085 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9086 arg, integer_zero_node);
9090 /* Fold a call to builtin toascii with argument ARG. */
9093 fold_builtin_toascii (location_t loc, tree arg)
9095 if (!validate_arg (arg, INTEGER_TYPE))
9098 /* Transform toascii(c) -> (c & 0x7f). */
9099 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9100 build_int_cst (NULL_TREE, 0x7f));
9103 /* Fold a call to builtin isdigit with argument ARG. */
9106 fold_builtin_isdigit (location_t loc, tree arg)
9108 if (!validate_arg (arg, INTEGER_TYPE))
9112 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9113 /* According to the C standard, isdigit is unaffected by locale.
9114 However, it definitely is affected by the target character set. */
9115 unsigned HOST_WIDE_INT target_digit0
9116 = lang_hooks.to_target_charset ('0');
9118 if (target_digit0 == 0)
9121 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9122 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9123 build_int_cst (unsigned_type_node, target_digit0));
9124 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9125 build_int_cst (unsigned_type_node, 9));
9129 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9132 fold_builtin_fabs (location_t loc, tree arg, tree type)
9134 if (!validate_arg (arg, REAL_TYPE))
9137 arg = fold_convert_loc (loc, type, arg);
9138 if (TREE_CODE (arg) == REAL_CST)
9139 return fold_abs_const (arg, type);
9140 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9143 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9146 fold_builtin_abs (location_t loc, tree arg, tree type)
9148 if (!validate_arg (arg, INTEGER_TYPE))
9151 arg = fold_convert_loc (loc, type, arg);
9152 if (TREE_CODE (arg) == INTEGER_CST)
9153 return fold_abs_const (arg, type);
9154 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9157 /* Fold a call to builtin fmin or fmax. */
9160 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9161 tree type, bool max)
9163 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9165 /* Calculate the result when the argument is a constant. */
9166 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9171 /* If either argument is NaN, return the other one. Avoid the
9172 transformation if we get (and honor) a signalling NaN. Using
9173 omit_one_operand() ensures we create a non-lvalue. */
9174 if (TREE_CODE (arg0) == REAL_CST
9175 && real_isnan (&TREE_REAL_CST (arg0))
9176 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9177 || ! TREE_REAL_CST (arg0).signalling))
9178 return omit_one_operand_loc (loc, type, arg1, arg0);
9179 if (TREE_CODE (arg1) == REAL_CST
9180 && real_isnan (&TREE_REAL_CST (arg1))
9181 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9182 || ! TREE_REAL_CST (arg1).signalling))
9183 return omit_one_operand_loc (loc, type, arg0, arg1);
9185 /* Transform fmin/fmax(x,x) -> x. */
9186 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9187 return omit_one_operand_loc (loc, type, arg0, arg1);
9189 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9190 functions to return the numeric arg if the other one is NaN.
9191 These tree codes don't honor that, so only transform if
9192 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9193 handled, so we don't have to worry about it either. */
9194 if (flag_finite_math_only)
9195 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9196 fold_convert_loc (loc, type, arg0),
9197 fold_convert_loc (loc, type, arg1));
9202 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9205 fold_builtin_carg (location_t loc, tree arg, tree type)
9207 if (validate_arg (arg, COMPLEX_TYPE)
9208 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9210 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9214 tree new_arg = builtin_save_expr (arg);
9215 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9216 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9217 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9224 /* Fold a call to builtin logb/ilogb. */
9227 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9229 if (! validate_arg (arg, REAL_TYPE))
9234 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9236 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9242 /* If arg is Inf or NaN and we're logb, return it. */
9243 if (TREE_CODE (rettype) == REAL_TYPE)
9244 return fold_convert_loc (loc, rettype, arg);
9245 /* Fall through... */
9247 /* Zero may set errno and/or raise an exception for logb, also
9248 for ilogb we don't know FP_ILOGB0. */
9251 /* For normal numbers, proceed iff radix == 2. In GCC,
9252 normalized significands are in the range [0.5, 1.0). We
9253 want the exponent as if they were [1.0, 2.0) so get the
9254 exponent and subtract 1. */
9255 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9256 return fold_convert_loc (loc, rettype,
9257 build_int_cst (NULL_TREE,
9258 REAL_EXP (value)-1));
9266 /* Fold a call to builtin significand, if radix == 2. */
9269 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9271 if (! validate_arg (arg, REAL_TYPE))
9276 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9278 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9285 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9286 return fold_convert_loc (loc, rettype, arg);
9288 /* For normal numbers, proceed iff radix == 2. */
9289 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9291 REAL_VALUE_TYPE result = *value;
9292 /* In GCC, normalized significands are in the range [0.5,
9293 1.0). We want them to be [1.0, 2.0) so set the
9295 SET_REAL_EXP (&result, 1);
9296 return build_real (rettype, result);
9305 /* Fold a call to builtin frexp, we can assume the base is 2. */
9308 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9310 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9315 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9318 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9320 /* Proceed if a valid pointer type was passed in. */
9321 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9323 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9329 /* For +-0, return (*exp = 0, +-0). */
9330 exp = integer_zero_node;
9335 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9336 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9339 /* Since the frexp function always expects base 2, and in
9340 GCC normalized significands are already in the range
9341 [0.5, 1.0), we have exactly what frexp wants. */
9342 REAL_VALUE_TYPE frac_rvt = *value;
9343 SET_REAL_EXP (&frac_rvt, 0);
9344 frac = build_real (rettype, frac_rvt);
9345 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9352 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9353 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9354 TREE_SIDE_EFFECTS (arg1) = 1;
9355 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9361 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9362 then we can assume the base is two. If it's false, then we have to
9363 check the mode of the TYPE parameter in certain cases. */
9366 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9367 tree type, bool ldexp)
9369 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9374 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9375 if (real_zerop (arg0) || integer_zerop (arg1)
9376 || (TREE_CODE (arg0) == REAL_CST
9377 && !real_isfinite (&TREE_REAL_CST (arg0))))
9378 return omit_one_operand_loc (loc, type, arg0, arg1);
9380 /* If both arguments are constant, then try to evaluate it. */
9381 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9382 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9383 && host_integerp (arg1, 0))
9385 /* Bound the maximum adjustment to twice the range of the
9386 mode's valid exponents. Use abs to ensure the range is
9387 positive as a sanity check. */
9388 const long max_exp_adj = 2 *
9389 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9390 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9392 /* Get the user-requested adjustment. */
9393 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9395 /* The requested adjustment must be inside this range. This
9396 is a preliminary cap to avoid things like overflow, we
9397 may still fail to compute the result for other reasons. */
9398 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9400 REAL_VALUE_TYPE initial_result;
9402 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9404 /* Ensure we didn't overflow. */
9405 if (! real_isinf (&initial_result))
9407 const REAL_VALUE_TYPE trunc_result
9408 = real_value_truncate (TYPE_MODE (type), initial_result);
9410 /* Only proceed if the target mode can hold the
9412 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9413 return build_real (type, trunc_result);
9422 /* Fold a call to builtin modf. */
9425 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9427 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9432 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9435 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9437 /* Proceed if a valid pointer type was passed in. */
9438 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9440 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9441 REAL_VALUE_TYPE trunc, frac;
9447 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9448 trunc = frac = *value;
9451 /* For +-Inf, return (*arg1 = arg0, +-0). */
9453 frac.sign = value->sign;
9457 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9458 real_trunc (&trunc, VOIDmode, value);
9459 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9460 /* If the original number was negative and already
9461 integral, then the fractional part is -0.0. */
9462 if (value->sign && frac.cl == rvc_zero)
9463 frac.sign = value->sign;
9467 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9468 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9469 build_real (rettype, trunc));
9470 TREE_SIDE_EFFECTS (arg1) = 1;
9471 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9472 build_real (rettype, frac));
9478 /* Given a location LOC, an interclass builtin function decl FNDECL
9479 and its single argument ARG, return an folded expression computing
9480 the same, or NULL_TREE if we either couldn't or didn't want to fold
9481 (the latter happen if there's an RTL instruction available). */
9484 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9486 enum machine_mode mode;
9488 if (!validate_arg (arg, REAL_TYPE))
9491 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9494 mode = TYPE_MODE (TREE_TYPE (arg));
9496 /* If there is no optab, try generic code. */
9497 switch (DECL_FUNCTION_CODE (fndecl))
9501 CASE_FLT_FN (BUILT_IN_ISINF):
9503 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9504 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9505 tree const type = TREE_TYPE (arg);
9509 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9510 real_from_string (&r, buf);
9511 result = build_call_expr (isgr_fn, 2,
9512 fold_build1_loc (loc, ABS_EXPR, type, arg),
9513 build_real (type, r));
9516 CASE_FLT_FN (BUILT_IN_FINITE):
9517 case BUILT_IN_ISFINITE:
9519 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9520 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9521 tree const type = TREE_TYPE (arg);
9525 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9526 real_from_string (&r, buf);
9527 result = build_call_expr (isle_fn, 2,
9528 fold_build1_loc (loc, ABS_EXPR, type, arg),
9529 build_real (type, r));
9530 /*result = fold_build2_loc (loc, UNGT_EXPR,
9531 TREE_TYPE (TREE_TYPE (fndecl)),
9532 fold_build1_loc (loc, ABS_EXPR, type, arg),
9533 build_real (type, r));
9534 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9535 TREE_TYPE (TREE_TYPE (fndecl)),
9539 case BUILT_IN_ISNORMAL:
9541 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9542 islessequal(fabs(x),DBL_MAX). */
9543 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9544 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9545 tree const type = TREE_TYPE (arg);
9546 REAL_VALUE_TYPE rmax, rmin;
9549 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9550 real_from_string (&rmax, buf);
9551 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9552 real_from_string (&rmin, buf);
9553 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9554 result = build_call_expr (isle_fn, 2, arg,
9555 build_real (type, rmax));
9556 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9557 build_call_expr (isge_fn, 2, arg,
9558 build_real (type, rmin)));
9568 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9569 ARG is the argument for the call. */
9572 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9574 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9577 if (!validate_arg (arg, REAL_TYPE))
9580 switch (builtin_index)
9582 case BUILT_IN_ISINF:
9583 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9584 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9586 if (TREE_CODE (arg) == REAL_CST)
9588 r = TREE_REAL_CST (arg);
9589 if (real_isinf (&r))
9590 return real_compare (GT_EXPR, &r, &dconst0)
9591 ? integer_one_node : integer_minus_one_node;
9593 return integer_zero_node;
9598 case BUILT_IN_ISINF_SIGN:
9600 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9601 /* In a boolean context, GCC will fold the inner COND_EXPR to
9602 1. So e.g. "if (isinf_sign(x))" would be folded to just
9603 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9604 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9605 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9606 tree tmp = NULL_TREE;
9608 arg = builtin_save_expr (arg);
9610 if (signbit_fn && isinf_fn)
9612 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9613 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9615 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9616 signbit_call, integer_zero_node);
9617 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9618 isinf_call, integer_zero_node);
9620 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9621 integer_minus_one_node, integer_one_node);
9622 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9630 case BUILT_IN_ISFINITE:
9631 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9632 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9633 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9635 if (TREE_CODE (arg) == REAL_CST)
9637 r = TREE_REAL_CST (arg);
9638 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9643 case BUILT_IN_ISNAN:
9644 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9645 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9647 if (TREE_CODE (arg) == REAL_CST)
9649 r = TREE_REAL_CST (arg);
9650 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9653 arg = builtin_save_expr (arg);
9654 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9661 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9662 This builtin will generate code to return the appropriate floating
9663 point classification depending on the value of the floating point
9664 number passed in. The possible return values must be supplied as
9665 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9666 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9667 one floating point argument which is "type generic". */
9670 fold_builtin_fpclassify (location_t loc, tree exp)
9672 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9673 arg, type, res, tmp;
9674 enum machine_mode mode;
9678 /* Verify the required arguments in the original call. */
9679 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9680 INTEGER_TYPE, INTEGER_TYPE,
9681 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9684 fp_nan = CALL_EXPR_ARG (exp, 0);
9685 fp_infinite = CALL_EXPR_ARG (exp, 1);
9686 fp_normal = CALL_EXPR_ARG (exp, 2);
9687 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9688 fp_zero = CALL_EXPR_ARG (exp, 4);
9689 arg = CALL_EXPR_ARG (exp, 5);
9690 type = TREE_TYPE (arg);
9691 mode = TYPE_MODE (type);
9692 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9696 (fabs(x) == Inf ? FP_INFINITE :
9697 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9698 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9700 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9701 build_real (type, dconst0));
9702 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9703 tmp, fp_zero, fp_subnormal);
9705 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9706 real_from_string (&r, buf);
9707 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9708 arg, build_real (type, r));
9709 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9711 if (HONOR_INFINITIES (mode))
9714 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9715 build_real (type, r));
9716 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9720 if (HONOR_NANS (mode))
9722 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9723 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9729 /* Fold a call to an unordered comparison function such as
9730 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9731 being called and ARG0 and ARG1 are the arguments for the call.
9732 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9733 the opposite of the desired result. UNORDERED_CODE is used
9734 for modes that can hold NaNs and ORDERED_CODE is used for
9738 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9739 enum tree_code unordered_code,
9740 enum tree_code ordered_code)
9742 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9743 enum tree_code code;
9745 enum tree_code code0, code1;
9746 tree cmp_type = NULL_TREE;
9748 type0 = TREE_TYPE (arg0);
9749 type1 = TREE_TYPE (arg1);
9751 code0 = TREE_CODE (type0);
9752 code1 = TREE_CODE (type1);
9754 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9755 /* Choose the wider of two real types. */
9756 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9758 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9760 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9763 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9764 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9766 if (unordered_code == UNORDERED_EXPR)
9768 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9769 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9770 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9773 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9775 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9776 fold_build2_loc (loc, code, type, arg0, arg1));
9779 /* Fold a call to built-in function FNDECL with 0 arguments.
9780 IGNORE is true if the result of the function call is ignored. This
9781 function returns NULL_TREE if no simplification was possible. */
9784 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9786 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9787 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9790 CASE_FLT_FN (BUILT_IN_INF):
9791 case BUILT_IN_INFD32:
9792 case BUILT_IN_INFD64:
9793 case BUILT_IN_INFD128:
9794 return fold_builtin_inf (loc, type, true);
9796 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9797 return fold_builtin_inf (loc, type, false);
9799 case BUILT_IN_CLASSIFY_TYPE:
9800 return fold_builtin_classify_type (NULL_TREE);
9808 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9809 IGNORE is true if the result of the function call is ignored. This
9810 function returns NULL_TREE if no simplification was possible. */
9813 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9815 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9816 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9819 case BUILT_IN_CONSTANT_P:
9821 tree val = fold_builtin_constant_p (arg0);
9823 /* Gimplification will pull the CALL_EXPR for the builtin out of
9824 an if condition. When not optimizing, we'll not CSE it back.
9825 To avoid link error types of regressions, return false now. */
9826 if (!val && !optimize)
9827 val = integer_zero_node;
9832 case BUILT_IN_CLASSIFY_TYPE:
9833 return fold_builtin_classify_type (arg0);
9835 case BUILT_IN_STRLEN:
9836 return fold_builtin_strlen (loc, type, arg0);
9838 CASE_FLT_FN (BUILT_IN_FABS):
9839 return fold_builtin_fabs (loc, arg0, type);
9843 case BUILT_IN_LLABS:
9844 case BUILT_IN_IMAXABS:
9845 return fold_builtin_abs (loc, arg0, type);
9847 CASE_FLT_FN (BUILT_IN_CONJ):
9848 if (validate_arg (arg0, COMPLEX_TYPE)
9849 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9850 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9853 CASE_FLT_FN (BUILT_IN_CREAL):
9854 if (validate_arg (arg0, COMPLEX_TYPE)
9855 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9856 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9859 CASE_FLT_FN (BUILT_IN_CIMAG):
9860 if (validate_arg (arg0, COMPLEX_TYPE)
9861 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9862 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9865 CASE_FLT_FN (BUILT_IN_CCOS):
9866 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9868 CASE_FLT_FN (BUILT_IN_CCOSH):
9869 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9871 CASE_FLT_FN (BUILT_IN_CPROJ):
9872 return fold_builtin_cproj(loc, arg0, type);
9874 CASE_FLT_FN (BUILT_IN_CSIN):
9875 if (validate_arg (arg0, COMPLEX_TYPE)
9876 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9877 return do_mpc_arg1 (arg0, type, mpc_sin);
9880 CASE_FLT_FN (BUILT_IN_CSINH):
9881 if (validate_arg (arg0, COMPLEX_TYPE)
9882 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9883 return do_mpc_arg1 (arg0, type, mpc_sinh);
9886 CASE_FLT_FN (BUILT_IN_CTAN):
9887 if (validate_arg (arg0, COMPLEX_TYPE)
9888 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9889 return do_mpc_arg1 (arg0, type, mpc_tan);
9892 CASE_FLT_FN (BUILT_IN_CTANH):
9893 if (validate_arg (arg0, COMPLEX_TYPE)
9894 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9895 return do_mpc_arg1 (arg0, type, mpc_tanh);
9898 CASE_FLT_FN (BUILT_IN_CLOG):
9899 if (validate_arg (arg0, COMPLEX_TYPE)
9900 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9901 return do_mpc_arg1 (arg0, type, mpc_log);
9904 CASE_FLT_FN (BUILT_IN_CSQRT):
9905 if (validate_arg (arg0, COMPLEX_TYPE)
9906 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9907 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9910 CASE_FLT_FN (BUILT_IN_CASIN):
9911 if (validate_arg (arg0, COMPLEX_TYPE)
9912 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9913 return do_mpc_arg1 (arg0, type, mpc_asin);
9916 CASE_FLT_FN (BUILT_IN_CACOS):
9917 if (validate_arg (arg0, COMPLEX_TYPE)
9918 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9919 return do_mpc_arg1 (arg0, type, mpc_acos);
9922 CASE_FLT_FN (BUILT_IN_CATAN):
9923 if (validate_arg (arg0, COMPLEX_TYPE)
9924 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9925 return do_mpc_arg1 (arg0, type, mpc_atan);
9928 CASE_FLT_FN (BUILT_IN_CASINH):
9929 if (validate_arg (arg0, COMPLEX_TYPE)
9930 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9931 return do_mpc_arg1 (arg0, type, mpc_asinh);
9934 CASE_FLT_FN (BUILT_IN_CACOSH):
9935 if (validate_arg (arg0, COMPLEX_TYPE)
9936 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9937 return do_mpc_arg1 (arg0, type, mpc_acosh);
9940 CASE_FLT_FN (BUILT_IN_CATANH):
9941 if (validate_arg (arg0, COMPLEX_TYPE)
9942 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9943 return do_mpc_arg1 (arg0, type, mpc_atanh);
9946 CASE_FLT_FN (BUILT_IN_CABS):
9947 return fold_builtin_cabs (loc, arg0, type, fndecl);
9949 CASE_FLT_FN (BUILT_IN_CARG):
9950 return fold_builtin_carg (loc, arg0, type);
9952 CASE_FLT_FN (BUILT_IN_SQRT):
9953 return fold_builtin_sqrt (loc, arg0, type);
9955 CASE_FLT_FN (BUILT_IN_CBRT):
9956 return fold_builtin_cbrt (loc, arg0, type);
9958 CASE_FLT_FN (BUILT_IN_ASIN):
9959 if (validate_arg (arg0, REAL_TYPE))
9960 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9961 &dconstm1, &dconst1, true);
9964 CASE_FLT_FN (BUILT_IN_ACOS):
9965 if (validate_arg (arg0, REAL_TYPE))
9966 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9967 &dconstm1, &dconst1, true);
9970 CASE_FLT_FN (BUILT_IN_ATAN):
9971 if (validate_arg (arg0, REAL_TYPE))
9972 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9975 CASE_FLT_FN (BUILT_IN_ASINH):
9976 if (validate_arg (arg0, REAL_TYPE))
9977 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9980 CASE_FLT_FN (BUILT_IN_ACOSH):
9981 if (validate_arg (arg0, REAL_TYPE))
9982 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9983 &dconst1, NULL, true);
9986 CASE_FLT_FN (BUILT_IN_ATANH):
9987 if (validate_arg (arg0, REAL_TYPE))
9988 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9989 &dconstm1, &dconst1, false);
9992 CASE_FLT_FN (BUILT_IN_SIN):
9993 if (validate_arg (arg0, REAL_TYPE))
9994 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9997 CASE_FLT_FN (BUILT_IN_COS):
9998 return fold_builtin_cos (loc, arg0, type, fndecl);
10000 CASE_FLT_FN (BUILT_IN_TAN):
10001 return fold_builtin_tan (arg0, type);
10003 CASE_FLT_FN (BUILT_IN_CEXP):
10004 return fold_builtin_cexp (loc, arg0, type);
10006 CASE_FLT_FN (BUILT_IN_CEXPI):
10007 if (validate_arg (arg0, REAL_TYPE))
10008 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10011 CASE_FLT_FN (BUILT_IN_SINH):
10012 if (validate_arg (arg0, REAL_TYPE))
10013 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10016 CASE_FLT_FN (BUILT_IN_COSH):
10017 return fold_builtin_cosh (loc, arg0, type, fndecl);
10019 CASE_FLT_FN (BUILT_IN_TANH):
10020 if (validate_arg (arg0, REAL_TYPE))
10021 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10024 CASE_FLT_FN (BUILT_IN_ERF):
10025 if (validate_arg (arg0, REAL_TYPE))
10026 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10029 CASE_FLT_FN (BUILT_IN_ERFC):
10030 if (validate_arg (arg0, REAL_TYPE))
10031 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10034 CASE_FLT_FN (BUILT_IN_TGAMMA):
10035 if (validate_arg (arg0, REAL_TYPE))
10036 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10039 CASE_FLT_FN (BUILT_IN_EXP):
10040 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10042 CASE_FLT_FN (BUILT_IN_EXP2):
10043 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10045 CASE_FLT_FN (BUILT_IN_EXP10):
10046 CASE_FLT_FN (BUILT_IN_POW10):
10047 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10049 CASE_FLT_FN (BUILT_IN_EXPM1):
10050 if (validate_arg (arg0, REAL_TYPE))
10051 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10054 CASE_FLT_FN (BUILT_IN_LOG):
10055 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10057 CASE_FLT_FN (BUILT_IN_LOG2):
10058 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10060 CASE_FLT_FN (BUILT_IN_LOG10):
10061 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10063 CASE_FLT_FN (BUILT_IN_LOG1P):
10064 if (validate_arg (arg0, REAL_TYPE))
10065 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10066 &dconstm1, NULL, false);
10069 CASE_FLT_FN (BUILT_IN_J0):
10070 if (validate_arg (arg0, REAL_TYPE))
10071 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10075 CASE_FLT_FN (BUILT_IN_J1):
10076 if (validate_arg (arg0, REAL_TYPE))
10077 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10081 CASE_FLT_FN (BUILT_IN_Y0):
10082 if (validate_arg (arg0, REAL_TYPE))
10083 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10084 &dconst0, NULL, false);
10087 CASE_FLT_FN (BUILT_IN_Y1):
10088 if (validate_arg (arg0, REAL_TYPE))
10089 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10090 &dconst0, NULL, false);
10093 CASE_FLT_FN (BUILT_IN_NAN):
10094 case BUILT_IN_NAND32:
10095 case BUILT_IN_NAND64:
10096 case BUILT_IN_NAND128:
10097 return fold_builtin_nan (arg0, type, true);
10099 CASE_FLT_FN (BUILT_IN_NANS):
10100 return fold_builtin_nan (arg0, type, false);
10102 CASE_FLT_FN (BUILT_IN_FLOOR):
10103 return fold_builtin_floor (loc, fndecl, arg0);
10105 CASE_FLT_FN (BUILT_IN_CEIL):
10106 return fold_builtin_ceil (loc, fndecl, arg0);
10108 CASE_FLT_FN (BUILT_IN_TRUNC):
10109 return fold_builtin_trunc (loc, fndecl, arg0);
10111 CASE_FLT_FN (BUILT_IN_ROUND):
10112 return fold_builtin_round (loc, fndecl, arg0);
10114 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10115 CASE_FLT_FN (BUILT_IN_RINT):
10116 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10118 CASE_FLT_FN (BUILT_IN_LCEIL):
10119 CASE_FLT_FN (BUILT_IN_LLCEIL):
10120 CASE_FLT_FN (BUILT_IN_LFLOOR):
10121 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10122 CASE_FLT_FN (BUILT_IN_LROUND):
10123 CASE_FLT_FN (BUILT_IN_LLROUND):
10124 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10126 CASE_FLT_FN (BUILT_IN_LRINT):
10127 CASE_FLT_FN (BUILT_IN_LLRINT):
10128 return fold_fixed_mathfn (loc, fndecl, arg0);
10130 case BUILT_IN_BSWAP32:
10131 case BUILT_IN_BSWAP64:
10132 return fold_builtin_bswap (fndecl, arg0);
10134 CASE_INT_FN (BUILT_IN_FFS):
10135 CASE_INT_FN (BUILT_IN_CLZ):
10136 CASE_INT_FN (BUILT_IN_CTZ):
10137 CASE_INT_FN (BUILT_IN_POPCOUNT):
10138 CASE_INT_FN (BUILT_IN_PARITY):
10139 return fold_builtin_bitop (fndecl, arg0);
10141 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10142 return fold_builtin_signbit (loc, arg0, type);
10144 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10145 return fold_builtin_significand (loc, arg0, type);
10147 CASE_FLT_FN (BUILT_IN_ILOGB):
10148 CASE_FLT_FN (BUILT_IN_LOGB):
10149 return fold_builtin_logb (loc, arg0, type);
10151 case BUILT_IN_ISASCII:
10152 return fold_builtin_isascii (loc, arg0);
10154 case BUILT_IN_TOASCII:
10155 return fold_builtin_toascii (loc, arg0);
10157 case BUILT_IN_ISDIGIT:
10158 return fold_builtin_isdigit (loc, arg0);
10160 CASE_FLT_FN (BUILT_IN_FINITE):
10161 case BUILT_IN_FINITED32:
10162 case BUILT_IN_FINITED64:
10163 case BUILT_IN_FINITED128:
10164 case BUILT_IN_ISFINITE:
10166 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10169 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10172 CASE_FLT_FN (BUILT_IN_ISINF):
10173 case BUILT_IN_ISINFD32:
10174 case BUILT_IN_ISINFD64:
10175 case BUILT_IN_ISINFD128:
10177 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10180 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10183 case BUILT_IN_ISNORMAL:
10184 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10186 case BUILT_IN_ISINF_SIGN:
10187 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10189 CASE_FLT_FN (BUILT_IN_ISNAN):
10190 case BUILT_IN_ISNAND32:
10191 case BUILT_IN_ISNAND64:
10192 case BUILT_IN_ISNAND128:
10193 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10195 case BUILT_IN_PRINTF:
10196 case BUILT_IN_PRINTF_UNLOCKED:
10197 case BUILT_IN_VPRINTF:
10198 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10200 case BUILT_IN_FREE:
10201 if (integer_zerop (arg0))
10202 return build_empty_stmt (loc);
10213 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10214 IGNORE is true if the result of the function call is ignored. This
10215 function returns NULL_TREE if no simplification was possible. */
10218 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10220 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10221 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10225 CASE_FLT_FN (BUILT_IN_JN):
10226 if (validate_arg (arg0, INTEGER_TYPE)
10227 && validate_arg (arg1, REAL_TYPE))
10228 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10231 CASE_FLT_FN (BUILT_IN_YN):
10232 if (validate_arg (arg0, INTEGER_TYPE)
10233 && validate_arg (arg1, REAL_TYPE))
10234 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10238 CASE_FLT_FN (BUILT_IN_DREM):
10239 CASE_FLT_FN (BUILT_IN_REMAINDER):
10240 if (validate_arg (arg0, REAL_TYPE)
10241 && validate_arg(arg1, REAL_TYPE))
10242 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10245 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10246 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10247 if (validate_arg (arg0, REAL_TYPE)
10248 && validate_arg(arg1, POINTER_TYPE))
10249 return do_mpfr_lgamma_r (arg0, arg1, type);
10252 CASE_FLT_FN (BUILT_IN_ATAN2):
10253 if (validate_arg (arg0, REAL_TYPE)
10254 && validate_arg(arg1, REAL_TYPE))
10255 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10258 CASE_FLT_FN (BUILT_IN_FDIM):
10259 if (validate_arg (arg0, REAL_TYPE)
10260 && validate_arg(arg1, REAL_TYPE))
10261 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10264 CASE_FLT_FN (BUILT_IN_HYPOT):
10265 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10267 CASE_FLT_FN (BUILT_IN_CPOW):
10268 if (validate_arg (arg0, COMPLEX_TYPE)
10269 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10270 && validate_arg (arg1, COMPLEX_TYPE)
10271 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10272 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10275 CASE_FLT_FN (BUILT_IN_LDEXP):
10276 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10277 CASE_FLT_FN (BUILT_IN_SCALBN):
10278 CASE_FLT_FN (BUILT_IN_SCALBLN):
10279 return fold_builtin_load_exponent (loc, arg0, arg1,
10280 type, /*ldexp=*/false);
10282 CASE_FLT_FN (BUILT_IN_FREXP):
10283 return fold_builtin_frexp (loc, arg0, arg1, type);
10285 CASE_FLT_FN (BUILT_IN_MODF):
10286 return fold_builtin_modf (loc, arg0, arg1, type);
10288 case BUILT_IN_BZERO:
10289 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10291 case BUILT_IN_FPUTS:
10292 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10294 case BUILT_IN_FPUTS_UNLOCKED:
10295 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10297 case BUILT_IN_STRSTR:
10298 return fold_builtin_strstr (loc, arg0, arg1, type);
10300 case BUILT_IN_STRCAT:
10301 return fold_builtin_strcat (loc, arg0, arg1);
10303 case BUILT_IN_STRSPN:
10304 return fold_builtin_strspn (loc, arg0, arg1);
10306 case BUILT_IN_STRCSPN:
10307 return fold_builtin_strcspn (loc, arg0, arg1);
10309 case BUILT_IN_STRCHR:
10310 case BUILT_IN_INDEX:
10311 return fold_builtin_strchr (loc, arg0, arg1, type);
10313 case BUILT_IN_STRRCHR:
10314 case BUILT_IN_RINDEX:
10315 return fold_builtin_strrchr (loc, arg0, arg1, type);
10317 case BUILT_IN_STRCPY:
10318 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10320 case BUILT_IN_STPCPY:
10323 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10327 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10330 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10333 case BUILT_IN_STRCMP:
10334 return fold_builtin_strcmp (loc, arg0, arg1);
10336 case BUILT_IN_STRPBRK:
10337 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10339 case BUILT_IN_EXPECT:
10340 return fold_builtin_expect (loc, arg0, arg1);
10342 CASE_FLT_FN (BUILT_IN_POW):
10343 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10345 CASE_FLT_FN (BUILT_IN_POWI):
10346 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10348 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10349 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10351 CASE_FLT_FN (BUILT_IN_FMIN):
10352 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10354 CASE_FLT_FN (BUILT_IN_FMAX):
10355 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10357 case BUILT_IN_ISGREATER:
10358 return fold_builtin_unordered_cmp (loc, fndecl,
10359 arg0, arg1, UNLE_EXPR, LE_EXPR);
10360 case BUILT_IN_ISGREATEREQUAL:
10361 return fold_builtin_unordered_cmp (loc, fndecl,
10362 arg0, arg1, UNLT_EXPR, LT_EXPR);
10363 case BUILT_IN_ISLESS:
10364 return fold_builtin_unordered_cmp (loc, fndecl,
10365 arg0, arg1, UNGE_EXPR, GE_EXPR);
10366 case BUILT_IN_ISLESSEQUAL:
10367 return fold_builtin_unordered_cmp (loc, fndecl,
10368 arg0, arg1, UNGT_EXPR, GT_EXPR);
10369 case BUILT_IN_ISLESSGREATER:
10370 return fold_builtin_unordered_cmp (loc, fndecl,
10371 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10372 case BUILT_IN_ISUNORDERED:
10373 return fold_builtin_unordered_cmp (loc, fndecl,
10374 arg0, arg1, UNORDERED_EXPR,
10377 /* We do the folding for va_start in the expander. */
10378 case BUILT_IN_VA_START:
10381 case BUILT_IN_SPRINTF:
10382 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10384 case BUILT_IN_OBJECT_SIZE:
10385 return fold_builtin_object_size (arg0, arg1);
10387 case BUILT_IN_PRINTF:
10388 case BUILT_IN_PRINTF_UNLOCKED:
10389 case BUILT_IN_VPRINTF:
10390 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10392 case BUILT_IN_PRINTF_CHK:
10393 case BUILT_IN_VPRINTF_CHK:
10394 if (!validate_arg (arg0, INTEGER_TYPE)
10395 || TREE_SIDE_EFFECTS (arg0))
10398 return fold_builtin_printf (loc, fndecl,
10399 arg1, NULL_TREE, ignore, fcode);
10402 case BUILT_IN_FPRINTF:
10403 case BUILT_IN_FPRINTF_UNLOCKED:
10404 case BUILT_IN_VFPRINTF:
10405 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10414 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10415 and ARG2. IGNORE is true if the result of the function call is ignored.
10416 This function returns NULL_TREE if no simplification was possible. */
10419 fold_builtin_3 (location_t loc, tree fndecl,
10420 tree arg0, tree arg1, tree arg2, bool ignore)
10422 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10423 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10427 CASE_FLT_FN (BUILT_IN_SINCOS):
10428 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10430 CASE_FLT_FN (BUILT_IN_FMA):
10431 if (validate_arg (arg0, REAL_TYPE)
10432 && validate_arg(arg1, REAL_TYPE)
10433 && validate_arg(arg2, REAL_TYPE))
10434 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10437 CASE_FLT_FN (BUILT_IN_REMQUO):
10438 if (validate_arg (arg0, REAL_TYPE)
10439 && validate_arg(arg1, REAL_TYPE)
10440 && validate_arg(arg2, POINTER_TYPE))
10441 return do_mpfr_remquo (arg0, arg1, arg2);
10444 case BUILT_IN_MEMSET:
10445 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10447 case BUILT_IN_BCOPY:
10448 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10449 void_type_node, true, /*endp=*/3);
10451 case BUILT_IN_MEMCPY:
10452 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10453 type, ignore, /*endp=*/0);
10455 case BUILT_IN_MEMPCPY:
10456 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10457 type, ignore, /*endp=*/1);
10459 case BUILT_IN_MEMMOVE:
10460 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10461 type, ignore, /*endp=*/3);
10463 case BUILT_IN_STRNCAT:
10464 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10466 case BUILT_IN_STRNCPY:
10467 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10469 case BUILT_IN_STRNCMP:
10470 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10472 case BUILT_IN_MEMCHR:
10473 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10475 case BUILT_IN_BCMP:
10476 case BUILT_IN_MEMCMP:
10477 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10479 case BUILT_IN_SPRINTF:
10480 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10482 case BUILT_IN_STRCPY_CHK:
10483 case BUILT_IN_STPCPY_CHK:
10484 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10487 case BUILT_IN_STRCAT_CHK:
10488 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10490 case BUILT_IN_PRINTF_CHK:
10491 case BUILT_IN_VPRINTF_CHK:
10492 if (!validate_arg (arg0, INTEGER_TYPE)
10493 || TREE_SIDE_EFFECTS (arg0))
10496 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10499 case BUILT_IN_FPRINTF:
10500 case BUILT_IN_FPRINTF_UNLOCKED:
10501 case BUILT_IN_VFPRINTF:
10502 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10505 case BUILT_IN_FPRINTF_CHK:
10506 case BUILT_IN_VFPRINTF_CHK:
10507 if (!validate_arg (arg1, INTEGER_TYPE)
10508 || TREE_SIDE_EFFECTS (arg1))
10511 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10520 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10521 ARG2, and ARG3. IGNORE is true if the result of the function call is
10522 ignored. This function returns NULL_TREE if no simplification was
10526 fold_builtin_4 (location_t loc, tree fndecl,
10527 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10529 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10533 case BUILT_IN_MEMCPY_CHK:
10534 case BUILT_IN_MEMPCPY_CHK:
10535 case BUILT_IN_MEMMOVE_CHK:
10536 case BUILT_IN_MEMSET_CHK:
10537 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10539 DECL_FUNCTION_CODE (fndecl));
10541 case BUILT_IN_STRNCPY_CHK:
10542 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10544 case BUILT_IN_STRNCAT_CHK:
10545 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10547 case BUILT_IN_FPRINTF_CHK:
10548 case BUILT_IN_VFPRINTF_CHK:
10549 if (!validate_arg (arg1, INTEGER_TYPE)
10550 || TREE_SIDE_EFFECTS (arg1))
10553 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10563 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10564 arguments, where NARGS <= 4. IGNORE is true if the result of the
10565 function call is ignored. This function returns NULL_TREE if no
10566 simplification was possible. Note that this only folds builtins with
10567 fixed argument patterns. Foldings that do varargs-to-varargs
10568 transformations, or that match calls with more than 4 arguments,
10569 need to be handled with fold_builtin_varargs instead. */
10571 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10574 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10576 tree ret = NULL_TREE;
10581 ret = fold_builtin_0 (loc, fndecl, ignore);
10584 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10587 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10590 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10593 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10601 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10602 SET_EXPR_LOCATION (ret, loc);
10603 TREE_NO_WARNING (ret) = 1;
10609 /* Builtins with folding operations that operate on "..." arguments
10610 need special handling; we need to store the arguments in a convenient
10611 data structure before attempting any folding. Fortunately there are
10612 only a few builtins that fall into this category. FNDECL is the
10613 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10614 result of the function call is ignored. */
10617 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10618 bool ignore ATTRIBUTE_UNUSED)
10620 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10621 tree ret = NULL_TREE;
10625 case BUILT_IN_SPRINTF_CHK:
10626 case BUILT_IN_VSPRINTF_CHK:
10627 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10630 case BUILT_IN_SNPRINTF_CHK:
10631 case BUILT_IN_VSNPRINTF_CHK:
10632 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10635 case BUILT_IN_FPCLASSIFY:
10636 ret = fold_builtin_fpclassify (loc, exp);
10644 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10645 SET_EXPR_LOCATION (ret, loc);
10646 TREE_NO_WARNING (ret) = 1;
10652 /* Return true if FNDECL shouldn't be folded right now.
10653 If a built-in function has an inline attribute always_inline
10654 wrapper, defer folding it after always_inline functions have
10655 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10656 might not be performed. */
10659 avoid_folding_inline_builtin (tree fndecl)
10661 return (DECL_DECLARED_INLINE_P (fndecl)
10662 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10664 && !cfun->always_inline_functions_inlined
10665 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10668 /* A wrapper function for builtin folding that prevents warnings for
10669 "statement without effect" and the like, caused by removing the
10670 call node earlier than the warning is generated. */
10673 fold_call_expr (location_t loc, tree exp, bool ignore)
10675 tree ret = NULL_TREE;
10676 tree fndecl = get_callee_fndecl (exp);
10678 && TREE_CODE (fndecl) == FUNCTION_DECL
10679 && DECL_BUILT_IN (fndecl)
10680 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10681 yet. Defer folding until we see all the arguments
10682 (after inlining). */
10683 && !CALL_EXPR_VA_ARG_PACK (exp))
10685 int nargs = call_expr_nargs (exp);
10687 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10688 instead last argument is __builtin_va_arg_pack (). Defer folding
10689 even in that case, until arguments are finalized. */
10690 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10692 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10694 && TREE_CODE (fndecl2) == FUNCTION_DECL
10695 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10696 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10700 if (avoid_folding_inline_builtin (fndecl))
10703 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10704 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10705 CALL_EXPR_ARGP (exp), ignore);
10708 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10710 tree *args = CALL_EXPR_ARGP (exp);
10711 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10714 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10722 /* Conveniently construct a function call expression. FNDECL names the
10723 function to be called and ARGLIST is a TREE_LIST of arguments. */
10726 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10728 tree fntype = TREE_TYPE (fndecl);
10729 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10730 int n = list_length (arglist);
10731 tree *argarray = (tree *) alloca (n * sizeof (tree));
10734 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10735 argarray[i] = TREE_VALUE (arglist);
10736 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10739 /* Conveniently construct a function call expression. FNDECL names the
10740 function to be called, N is the number of arguments, and the "..."
10741 parameters are the argument expressions. */
10744 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10747 tree fntype = TREE_TYPE (fndecl);
10748 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10749 tree *argarray = (tree *) alloca (n * sizeof (tree));
10753 for (i = 0; i < n; i++)
10754 argarray[i] = va_arg (ap, tree);
10756 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10759 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10760 N arguments are passed in the array ARGARRAY. */
10763 fold_builtin_call_array (location_t loc, tree type,
10768 tree ret = NULL_TREE;
10771 if (TREE_CODE (fn) == ADDR_EXPR)
10773 tree fndecl = TREE_OPERAND (fn, 0);
10774 if (TREE_CODE (fndecl) == FUNCTION_DECL
10775 && DECL_BUILT_IN (fndecl))
10777 /* If last argument is __builtin_va_arg_pack (), arguments to this
10778 function are not finalized yet. Defer folding until they are. */
10779 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10781 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10783 && TREE_CODE (fndecl2) == FUNCTION_DECL
10784 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10785 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10786 return build_call_array_loc (loc, type, fn, n, argarray);
10788 if (avoid_folding_inline_builtin (fndecl))
10789 return build_call_array_loc (loc, type, fn, n, argarray);
10790 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10792 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10796 return build_call_array_loc (loc, type, fn, n, argarray);
10798 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10800 /* First try the transformations that don't require consing up
10802 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10807 /* If we got this far, we need to build an exp. */
10808 exp = build_call_array_loc (loc, type, fn, n, argarray);
10809 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10810 return ret ? ret : exp;
10814 return build_call_array_loc (loc, type, fn, n, argarray);
10817 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10818 along with N new arguments specified as the "..." parameters. SKIP
10819 is the number of arguments in EXP to be omitted. This function is used
10820 to do varargs-to-varargs transformations. */
10823 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10825 int oldnargs = call_expr_nargs (exp);
10826 int nargs = oldnargs - skip + n;
10827 tree fntype = TREE_TYPE (fndecl);
10828 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10836 buffer = XALLOCAVEC (tree, nargs);
10838 for (i = 0; i < n; i++)
10839 buffer[i] = va_arg (ap, tree);
10841 for (j = skip; j < oldnargs; j++, i++)
10842 buffer[i] = CALL_EXPR_ARG (exp, j);
10845 buffer = CALL_EXPR_ARGP (exp) + skip;
10847 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
10850 /* Validate a single argument ARG against a tree code CODE representing
10854 validate_arg (const_tree arg, enum tree_code code)
10858 else if (code == POINTER_TYPE)
10859 return POINTER_TYPE_P (TREE_TYPE (arg));
10860 else if (code == INTEGER_TYPE)
10861 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10862 return code == TREE_CODE (TREE_TYPE (arg));
10865 /* This function validates the types of a function call argument list
10866 against a specified list of tree_codes. If the last specifier is a 0,
10867 that represents an ellipses, otherwise the last specifier must be a
10870 This is the GIMPLE version of validate_arglist. Eventually we want to
10871 completely convert builtins.c to work from GIMPLEs and the tree based
10872 validate_arglist will then be removed. */
10875 validate_gimple_arglist (const_gimple call, ...)
10877 enum tree_code code;
10883 va_start (ap, call);
10888 code = (enum tree_code) va_arg (ap, int);
10892 /* This signifies an ellipses, any further arguments are all ok. */
10896 /* This signifies an endlink, if no arguments remain, return
10897 true, otherwise return false. */
10898 res = (i == gimple_call_num_args (call));
10901 /* If no parameters remain or the parameter's code does not
10902 match the specified code, return false. Otherwise continue
10903 checking any remaining arguments. */
10904 arg = gimple_call_arg (call, i++);
10905 if (!validate_arg (arg, code))
10912 /* We need gotos here since we can only have one VA_CLOSE in a
10920 /* This function validates the types of a function call argument list
10921 against a specified list of tree_codes. If the last specifier is a 0,
10922 that represents an ellipses, otherwise the last specifier must be a
10926 validate_arglist (const_tree callexpr, ...)
10928 enum tree_code code;
10931 const_call_expr_arg_iterator iter;
10934 va_start (ap, callexpr);
10935 init_const_call_expr_arg_iterator (callexpr, &iter);
10939 code = (enum tree_code) va_arg (ap, int);
10943 /* This signifies an ellipses, any further arguments are all ok. */
10947 /* This signifies an endlink, if no arguments remain, return
10948 true, otherwise return false. */
10949 res = !more_const_call_expr_args_p (&iter);
10952 /* If no parameters remain or the parameter's code does not
10953 match the specified code, return false. Otherwise continue
10954 checking any remaining arguments. */
10955 arg = next_const_call_expr_arg (&iter);
10956 if (!validate_arg (arg, code))
10963 /* We need gotos here since we can only have one VA_CLOSE in a
10971 /* Default target-specific builtin expander that does nothing. */
10974 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10975 rtx target ATTRIBUTE_UNUSED,
10976 rtx subtarget ATTRIBUTE_UNUSED,
10977 enum machine_mode mode ATTRIBUTE_UNUSED,
10978 int ignore ATTRIBUTE_UNUSED)
10983 /* Returns true is EXP represents data that would potentially reside
10984 in a readonly section. */
10987 readonly_data_expr (tree exp)
10991 if (TREE_CODE (exp) != ADDR_EXPR)
10994 exp = get_base_address (TREE_OPERAND (exp, 0));
10998 /* Make sure we call decl_readonly_section only for trees it
10999 can handle (since it returns true for everything it doesn't
11001 if (TREE_CODE (exp) == STRING_CST
11002 || TREE_CODE (exp) == CONSTRUCTOR
11003 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11004 return decl_readonly_section (exp, 0);
11009 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11010 to the call, and TYPE is its return type.
11012 Return NULL_TREE if no simplification was possible, otherwise return the
11013 simplified form of the call as a tree.
11015 The simplified form may be a constant or other expression which
11016 computes the same value, but in a more efficient manner (including
11017 calls to other builtin functions).
11019 The call may contain arguments which need to be evaluated, but
11020 which are not useful to determine the result of the call. In
11021 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11022 COMPOUND_EXPR will be an argument which must be evaluated.
11023 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11024 COMPOUND_EXPR in the chain will contain the tree for the simplified
11025 form of the builtin function call. */
11028 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11030 if (!validate_arg (s1, POINTER_TYPE)
11031 || !validate_arg (s2, POINTER_TYPE))
11036 const char *p1, *p2;
11038 p2 = c_getstr (s2);
11042 p1 = c_getstr (s1);
11045 const char *r = strstr (p1, p2);
11049 return build_int_cst (TREE_TYPE (s1), 0);
11051 /* Return an offset into the constant string argument. */
11052 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11053 s1, size_int (r - p1));
11054 return fold_convert_loc (loc, type, tem);
11057 /* The argument is const char *, and the result is char *, so we need
11058 a type conversion here to avoid a warning. */
11060 return fold_convert_loc (loc, type, s1);
11065 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11069 /* New argument list transforming strstr(s1, s2) to
11070 strchr(s1, s2[0]). */
11071 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11075 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11076 the call, and TYPE is its return type.
11078 Return NULL_TREE if no simplification was possible, otherwise return the
11079 simplified form of the call as a tree.
11081 The simplified form may be a constant or other expression which
11082 computes the same value, but in a more efficient manner (including
11083 calls to other builtin functions).
11085 The call may contain arguments which need to be evaluated, but
11086 which are not useful to determine the result of the call. In
11087 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11088 COMPOUND_EXPR will be an argument which must be evaluated.
11089 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11090 COMPOUND_EXPR in the chain will contain the tree for the simplified
11091 form of the builtin function call. */
11094 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11096 if (!validate_arg (s1, POINTER_TYPE)
11097 || !validate_arg (s2, INTEGER_TYPE))
11103 if (TREE_CODE (s2) != INTEGER_CST)
11106 p1 = c_getstr (s1);
11113 if (target_char_cast (s2, &c))
11116 r = strchr (p1, c);
11119 return build_int_cst (TREE_TYPE (s1), 0);
11121 /* Return an offset into the constant string argument. */
11122 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11123 s1, size_int (r - p1));
11124 return fold_convert_loc (loc, type, tem);
11130 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11131 the call, and TYPE is its return type.
11133 Return NULL_TREE if no simplification was possible, otherwise return the
11134 simplified form of the call as a tree.
11136 The simplified form may be a constant or other expression which
11137 computes the same value, but in a more efficient manner (including
11138 calls to other builtin functions).
11140 The call may contain arguments which need to be evaluated, but
11141 which are not useful to determine the result of the call. In
11142 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11143 COMPOUND_EXPR will be an argument which must be evaluated.
11144 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11145 COMPOUND_EXPR in the chain will contain the tree for the simplified
11146 form of the builtin function call. */
11149 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11151 if (!validate_arg (s1, POINTER_TYPE)
11152 || !validate_arg (s2, INTEGER_TYPE))
11159 if (TREE_CODE (s2) != INTEGER_CST)
11162 p1 = c_getstr (s1);
11169 if (target_char_cast (s2, &c))
11172 r = strrchr (p1, c);
11175 return build_int_cst (TREE_TYPE (s1), 0);
11177 /* Return an offset into the constant string argument. */
11178 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11179 s1, size_int (r - p1));
11180 return fold_convert_loc (loc, type, tem);
11183 if (! integer_zerop (s2))
11186 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11190 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11191 return build_call_expr_loc (loc, fn, 2, s1, s2);
11195 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11196 to the call, and TYPE is its return type.
11198 Return NULL_TREE if no simplification was possible, otherwise return the
11199 simplified form of the call as a tree.
11201 The simplified form may be a constant or other expression which
11202 computes the same value, but in a more efficient manner (including
11203 calls to other builtin functions).
11205 The call may contain arguments which need to be evaluated, but
11206 which are not useful to determine the result of the call. In
11207 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11208 COMPOUND_EXPR will be an argument which must be evaluated.
11209 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11210 COMPOUND_EXPR in the chain will contain the tree for the simplified
11211 form of the builtin function call. */
11214 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11216 if (!validate_arg (s1, POINTER_TYPE)
11217 || !validate_arg (s2, POINTER_TYPE))
11222 const char *p1, *p2;
11224 p2 = c_getstr (s2);
11228 p1 = c_getstr (s1);
11231 const char *r = strpbrk (p1, p2);
11235 return build_int_cst (TREE_TYPE (s1), 0);
11237 /* Return an offset into the constant string argument. */
11238 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11239 s1, size_int (r - p1));
11240 return fold_convert_loc (loc, type, tem);
11244 /* strpbrk(x, "") == NULL.
11245 Evaluate and ignore s1 in case it had side-effects. */
11246 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11249 return NULL_TREE; /* Really call strpbrk. */
11251 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11255 /* New argument list transforming strpbrk(s1, s2) to
11256 strchr(s1, s2[0]). */
11257 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11261 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11264 Return NULL_TREE if no simplification was possible, otherwise return the
11265 simplified form of the call as a tree.
11267 The simplified form may be a constant or other expression which
11268 computes the same value, but in a more efficient manner (including
11269 calls to other builtin functions).
11271 The call may contain arguments which need to be evaluated, but
11272 which are not useful to determine the result of the call. In
11273 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11274 COMPOUND_EXPR will be an argument which must be evaluated.
11275 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11276 COMPOUND_EXPR in the chain will contain the tree for the simplified
11277 form of the builtin function call. */
11280 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11282 if (!validate_arg (dst, POINTER_TYPE)
11283 || !validate_arg (src, POINTER_TYPE))
11287 const char *p = c_getstr (src);
11289 /* If the string length is zero, return the dst parameter. */
11290 if (p && *p == '\0')
11293 if (optimize_insn_for_speed_p ())
11295 /* See if we can store by pieces into (dst + strlen(dst)). */
11297 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11298 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11300 if (!strlen_fn || !strcpy_fn)
11303 /* If we don't have a movstr we don't want to emit an strcpy
11304 call. We have to do that if the length of the source string
11305 isn't computable (in that case we can use memcpy probably
11306 later expanding to a sequence of mov instructions). If we
11307 have movstr instructions we can emit strcpy calls. */
11310 tree len = c_strlen (src, 1);
11311 if (! len || TREE_SIDE_EFFECTS (len))
11315 /* Stabilize the argument list. */
11316 dst = builtin_save_expr (dst);
11318 /* Create strlen (dst). */
11319 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11320 /* Create (dst p+ strlen (dst)). */
11322 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11323 TREE_TYPE (dst), dst, newdst);
11324 newdst = builtin_save_expr (newdst);
11326 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11327 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11333 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11334 arguments to the call.
11336 Return NULL_TREE if no simplification was possible, otherwise return the
11337 simplified form of the call as a tree.
11339 The simplified form may be a constant or other expression which
11340 computes the same value, but in a more efficient manner (including
11341 calls to other builtin functions).
11343 The call may contain arguments which need to be evaluated, but
11344 which are not useful to determine the result of the call. In
11345 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11346 COMPOUND_EXPR will be an argument which must be evaluated.
11347 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11348 COMPOUND_EXPR in the chain will contain the tree for the simplified
11349 form of the builtin function call. */
11352 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11354 if (!validate_arg (dst, POINTER_TYPE)
11355 || !validate_arg (src, POINTER_TYPE)
11356 || !validate_arg (len, INTEGER_TYPE))
11360 const char *p = c_getstr (src);
11362 /* If the requested length is zero, or the src parameter string
11363 length is zero, return the dst parameter. */
11364 if (integer_zerop (len) || (p && *p == '\0'))
11365 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11367 /* If the requested len is greater than or equal to the string
11368 length, call strcat. */
11369 if (TREE_CODE (len) == INTEGER_CST && p
11370 && compare_tree_int (len, strlen (p)) >= 0)
11372 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11374 /* If the replacement _DECL isn't initialized, don't do the
11379 return build_call_expr_loc (loc, fn, 2, dst, src);
11385 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11388 Return NULL_TREE if no simplification was possible, otherwise return the
11389 simplified form of the call as a tree.
11391 The simplified form may be a constant or other expression which
11392 computes the same value, but in a more efficient manner (including
11393 calls to other builtin functions).
11395 The call may contain arguments which need to be evaluated, but
11396 which are not useful to determine the result of the call. In
11397 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11398 COMPOUND_EXPR will be an argument which must be evaluated.
11399 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11400 COMPOUND_EXPR in the chain will contain the tree for the simplified
11401 form of the builtin function call. */
11404 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11406 if (!validate_arg (s1, POINTER_TYPE)
11407 || !validate_arg (s2, POINTER_TYPE))
11411 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11413 /* If both arguments are constants, evaluate at compile-time. */
11416 const size_t r = strspn (p1, p2);
11417 return size_int (r);
11420 /* If either argument is "", return NULL_TREE. */
11421 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11422 /* Evaluate and ignore both arguments in case either one has
11424 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11430 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11433 Return NULL_TREE if no simplification was possible, otherwise return the
11434 simplified form of the call as a tree.
11436 The simplified form may be a constant or other expression which
11437 computes the same value, but in a more efficient manner (including
11438 calls to other builtin functions).
11440 The call may contain arguments which need to be evaluated, but
11441 which are not useful to determine the result of the call. In
11442 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11443 COMPOUND_EXPR will be an argument which must be evaluated.
11444 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11445 COMPOUND_EXPR in the chain will contain the tree for the simplified
11446 form of the builtin function call. */
11449 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11451 if (!validate_arg (s1, POINTER_TYPE)
11452 || !validate_arg (s2, POINTER_TYPE))
11456 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11458 /* If both arguments are constants, evaluate at compile-time. */
11461 const size_t r = strcspn (p1, p2);
11462 return size_int (r);
11465 /* If the first argument is "", return NULL_TREE. */
11466 if (p1 && *p1 == '\0')
11468 /* Evaluate and ignore argument s2 in case it has
11470 return omit_one_operand_loc (loc, size_type_node,
11471 size_zero_node, s2);
11474 /* If the second argument is "", return __builtin_strlen(s1). */
11475 if (p2 && *p2 == '\0')
11477 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11479 /* If the replacement _DECL isn't initialized, don't do the
11484 return build_call_expr_loc (loc, fn, 1, s1);
11490 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11491 to the call. IGNORE is true if the value returned
11492 by the builtin will be ignored. UNLOCKED is true is true if this
11493 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11494 the known length of the string. Return NULL_TREE if no simplification
11498 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11499 bool ignore, bool unlocked, tree len)
11501 /* If we're using an unlocked function, assume the other unlocked
11502 functions exist explicitly. */
11503 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11504 : implicit_built_in_decls[BUILT_IN_FPUTC];
11505 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11506 : implicit_built_in_decls[BUILT_IN_FWRITE];
11508 /* If the return value is used, don't do the transformation. */
11512 /* Verify the arguments in the original call. */
11513 if (!validate_arg (arg0, POINTER_TYPE)
11514 || !validate_arg (arg1, POINTER_TYPE))
11518 len = c_strlen (arg0, 0);
11520 /* Get the length of the string passed to fputs. If the length
11521 can't be determined, punt. */
11523 || TREE_CODE (len) != INTEGER_CST)
11526 switch (compare_tree_int (len, 1))
11528 case -1: /* length is 0, delete the call entirely . */
11529 return omit_one_operand_loc (loc, integer_type_node,
11530 integer_zero_node, arg1);;
11532 case 0: /* length is 1, call fputc. */
11534 const char *p = c_getstr (arg0);
11539 return build_call_expr_loc (loc, fn_fputc, 2,
11540 build_int_cst (NULL_TREE, p[0]), arg1);
11546 case 1: /* length is greater than 1, call fwrite. */
11548 /* If optimizing for size keep fputs. */
11549 if (optimize_function_for_size_p (cfun))
11551 /* New argument list transforming fputs(string, stream) to
11552 fwrite(string, 1, len, stream). */
11554 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11555 size_one_node, len, arg1);
11560 gcc_unreachable ();
11565 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11566 produced. False otherwise. This is done so that we don't output the error
11567 or warning twice or three times. */
11570 fold_builtin_next_arg (tree exp, bool va_start_p)
11572 tree fntype = TREE_TYPE (current_function_decl);
11573 int nargs = call_expr_nargs (exp);
11576 if (TYPE_ARG_TYPES (fntype) == 0
11577 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11578 == void_type_node))
11580 error ("%<va_start%> used in function with fixed args");
11586 if (va_start_p && (nargs != 2))
11588 error ("wrong number of arguments to function %<va_start%>");
11591 arg = CALL_EXPR_ARG (exp, 1);
11593 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11594 when we checked the arguments and if needed issued a warning. */
11599 /* Evidently an out of date version of <stdarg.h>; can't validate
11600 va_start's second argument, but can still work as intended. */
11601 warning (0, "%<__builtin_next_arg%> called without an argument");
11604 else if (nargs > 1)
11606 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11609 arg = CALL_EXPR_ARG (exp, 0);
11612 if (TREE_CODE (arg) == SSA_NAME)
11613 arg = SSA_NAME_VAR (arg);
11615 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11616 or __builtin_next_arg (0) the first time we see it, after checking
11617 the arguments and if needed issuing a warning. */
11618 if (!integer_zerop (arg))
11620 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11622 /* Strip off all nops for the sake of the comparison. This
11623 is not quite the same as STRIP_NOPS. It does more.
11624 We must also strip off INDIRECT_EXPR for C++ reference
11626 while (CONVERT_EXPR_P (arg)
11627 || TREE_CODE (arg) == INDIRECT_REF)
11628 arg = TREE_OPERAND (arg, 0);
11629 if (arg != last_parm)
11631 /* FIXME: Sometimes with the tree optimizers we can get the
11632 not the last argument even though the user used the last
11633 argument. We just warn and set the arg to be the last
11634 argument so that we will get wrong-code because of
11636 warning (0, "second parameter of %<va_start%> not last named argument");
11639 /* Undefined by C99 7.15.1.4p4 (va_start):
11640 "If the parameter parmN is declared with the register storage
11641 class, with a function or array type, or with a type that is
11642 not compatible with the type that results after application of
11643 the default argument promotions, the behavior is undefined."
11645 else if (DECL_REGISTER (arg))
11646 warning (0, "undefined behaviour when second parameter of "
11647 "%<va_start%> is declared with %<register%> storage");
11649 /* We want to verify the second parameter just once before the tree
11650 optimizers are run and then avoid keeping it in the tree,
11651 as otherwise we could warn even for correct code like:
11652 void foo (int i, ...)
11653 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11655 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11657 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11663 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11664 ORIG may be null if this is a 2-argument call. We don't attempt to
11665 simplify calls with more than 3 arguments.
11667 Return NULL_TREE if no simplification was possible, otherwise return the
11668 simplified form of the call as a tree. If IGNORED is true, it means that
11669 the caller does not use the returned value of the function. */
11672 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11673 tree orig, int ignored)
11676 const char *fmt_str = NULL;
11678 /* Verify the required arguments in the original call. We deal with two
11679 types of sprintf() calls: 'sprintf (str, fmt)' and
11680 'sprintf (dest, "%s", orig)'. */
11681 if (!validate_arg (dest, POINTER_TYPE)
11682 || !validate_arg (fmt, POINTER_TYPE))
11684 if (orig && !validate_arg (orig, POINTER_TYPE))
11687 /* Check whether the format is a literal string constant. */
11688 fmt_str = c_getstr (fmt);
11689 if (fmt_str == NULL)
11693 retval = NULL_TREE;
11695 if (!init_target_chars ())
11698 /* If the format doesn't contain % args or %%, use strcpy. */
11699 if (strchr (fmt_str, target_percent) == NULL)
11701 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11706 /* Don't optimize sprintf (buf, "abc", ptr++). */
11710 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11711 'format' is known to contain no % formats. */
11712 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11714 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11717 /* If the format is "%s", use strcpy if the result isn't used. */
11718 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11721 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11726 /* Don't crash on sprintf (str1, "%s"). */
11730 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11733 retval = c_strlen (orig, 1);
11734 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11737 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11740 if (call && retval)
11742 retval = fold_convert_loc
11743 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11745 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11751 /* Expand a call EXP to __builtin_object_size. */
11754 expand_builtin_object_size (tree exp)
11757 int object_size_type;
11758 tree fndecl = get_callee_fndecl (exp);
11760 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11762 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11764 expand_builtin_trap ();
11768 ost = CALL_EXPR_ARG (exp, 1);
11771 if (TREE_CODE (ost) != INTEGER_CST
11772 || tree_int_cst_sgn (ost) < 0
11773 || compare_tree_int (ost, 3) > 0)
11775 error ("%Klast argument of %D is not integer constant between 0 and 3",
11777 expand_builtin_trap ();
11781 object_size_type = tree_low_cst (ost, 0);
11783 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11786 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11787 FCODE is the BUILT_IN_* to use.
11788 Return NULL_RTX if we failed; the caller should emit a normal call,
11789 otherwise try to get the result in TARGET, if convenient (and in
11790 mode MODE if that's convenient). */
11793 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11794 enum built_in_function fcode)
11796 tree dest, src, len, size;
11798 if (!validate_arglist (exp,
11800 fcode == BUILT_IN_MEMSET_CHK
11801 ? INTEGER_TYPE : POINTER_TYPE,
11802 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11805 dest = CALL_EXPR_ARG (exp, 0);
11806 src = CALL_EXPR_ARG (exp, 1);
11807 len = CALL_EXPR_ARG (exp, 2);
11808 size = CALL_EXPR_ARG (exp, 3);
11810 if (! host_integerp (size, 1))
11813 if (host_integerp (len, 1) || integer_all_onesp (size))
11817 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11819 warning_at (tree_nonartificial_location (exp),
11820 0, "%Kcall to %D will always overflow destination buffer",
11821 exp, get_callee_fndecl (exp));
11826 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11827 mem{cpy,pcpy,move,set} is available. */
11830 case BUILT_IN_MEMCPY_CHK:
11831 fn = built_in_decls[BUILT_IN_MEMCPY];
11833 case BUILT_IN_MEMPCPY_CHK:
11834 fn = built_in_decls[BUILT_IN_MEMPCPY];
11836 case BUILT_IN_MEMMOVE_CHK:
11837 fn = built_in_decls[BUILT_IN_MEMMOVE];
11839 case BUILT_IN_MEMSET_CHK:
11840 fn = built_in_decls[BUILT_IN_MEMSET];
11849 fn = build_call_nofold (fn, 3, dest, src, len);
11850 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11851 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11852 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11854 else if (fcode == BUILT_IN_MEMSET_CHK)
11858 unsigned int dest_align
11859 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11861 /* If DEST is not a pointer type, call the normal function. */
11862 if (dest_align == 0)
11865 /* If SRC and DEST are the same (and not volatile), do nothing. */
11866 if (operand_equal_p (src, dest, 0))
11870 if (fcode != BUILT_IN_MEMPCPY_CHK)
11872 /* Evaluate and ignore LEN in case it has side-effects. */
11873 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11874 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11877 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11878 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11881 /* __memmove_chk special case. */
11882 if (fcode == BUILT_IN_MEMMOVE_CHK)
11884 unsigned int src_align
11885 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11887 if (src_align == 0)
11890 /* If src is categorized for a readonly section we can use
11891 normal __memcpy_chk. */
11892 if (readonly_data_expr (src))
11894 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11897 fn = build_call_nofold (fn, 4, dest, src, len, size);
11898 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11899 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11900 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11907 /* Emit warning if a buffer overflow is detected at compile time. */
11910 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11914 location_t loc = tree_nonartificial_location (exp);
11918 case BUILT_IN_STRCPY_CHK:
11919 case BUILT_IN_STPCPY_CHK:
11920 /* For __strcat_chk the warning will be emitted only if overflowing
11921 by at least strlen (dest) + 1 bytes. */
11922 case BUILT_IN_STRCAT_CHK:
11923 len = CALL_EXPR_ARG (exp, 1);
11924 size = CALL_EXPR_ARG (exp, 2);
11927 case BUILT_IN_STRNCAT_CHK:
11928 case BUILT_IN_STRNCPY_CHK:
11929 len = CALL_EXPR_ARG (exp, 2);
11930 size = CALL_EXPR_ARG (exp, 3);
11932 case BUILT_IN_SNPRINTF_CHK:
11933 case BUILT_IN_VSNPRINTF_CHK:
11934 len = CALL_EXPR_ARG (exp, 1);
11935 size = CALL_EXPR_ARG (exp, 3);
11938 gcc_unreachable ();
11944 if (! host_integerp (size, 1) || integer_all_onesp (size))
11949 len = c_strlen (len, 1);
11950 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11953 else if (fcode == BUILT_IN_STRNCAT_CHK)
11955 tree src = CALL_EXPR_ARG (exp, 1);
11956 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11958 src = c_strlen (src, 1);
11959 if (! src || ! host_integerp (src, 1))
11961 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11962 exp, get_callee_fndecl (exp));
11965 else if (tree_int_cst_lt (src, size))
11968 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11971 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11972 exp, get_callee_fndecl (exp));
11975 /* Emit warning if a buffer overflow is detected at compile time
11976 in __sprintf_chk/__vsprintf_chk calls. */
11979 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11981 tree size, len, fmt;
11982 const char *fmt_str;
11983 int nargs = call_expr_nargs (exp);
11985 /* Verify the required arguments in the original call. */
11989 size = CALL_EXPR_ARG (exp, 2);
11990 fmt = CALL_EXPR_ARG (exp, 3);
11992 if (! host_integerp (size, 1) || integer_all_onesp (size))
11995 /* Check whether the format is a literal string constant. */
11996 fmt_str = c_getstr (fmt);
11997 if (fmt_str == NULL)
12000 if (!init_target_chars ())
12003 /* If the format doesn't contain % args or %%, we know its size. */
12004 if (strchr (fmt_str, target_percent) == 0)
12005 len = build_int_cstu (size_type_node, strlen (fmt_str));
12006 /* If the format is "%s" and first ... argument is a string literal,
12008 else if (fcode == BUILT_IN_SPRINTF_CHK
12009 && strcmp (fmt_str, target_percent_s) == 0)
12015 arg = CALL_EXPR_ARG (exp, 4);
12016 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12019 len = c_strlen (arg, 1);
12020 if (!len || ! host_integerp (len, 1))
12026 if (! tree_int_cst_lt (len, size))
12027 warning_at (tree_nonartificial_location (exp),
12028 0, "%Kcall to %D will always overflow destination buffer",
12029 exp, get_callee_fndecl (exp));
12032 /* Emit warning if a free is called with address of a variable. */
12035 maybe_emit_free_warning (tree exp)
12037 tree arg = CALL_EXPR_ARG (exp, 0);
12040 if (TREE_CODE (arg) != ADDR_EXPR)
12043 arg = get_base_address (TREE_OPERAND (arg, 0));
12044 if (arg == NULL || INDIRECT_REF_P (arg))
12047 if (SSA_VAR_P (arg))
12048 warning_at (tree_nonartificial_location (exp),
12049 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12051 warning_at (tree_nonartificial_location (exp),
12052 0, "%Kattempt to free a non-heap object", exp);
12055 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12059 fold_builtin_object_size (tree ptr, tree ost)
12061 tree ret = NULL_TREE;
12062 int object_size_type;
12064 if (!validate_arg (ptr, POINTER_TYPE)
12065 || !validate_arg (ost, INTEGER_TYPE))
12070 if (TREE_CODE (ost) != INTEGER_CST
12071 || tree_int_cst_sgn (ost) < 0
12072 || compare_tree_int (ost, 3) > 0)
12075 object_size_type = tree_low_cst (ost, 0);
12077 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12078 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12079 and (size_t) 0 for types 2 and 3. */
12080 if (TREE_SIDE_EFFECTS (ptr))
12081 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12083 if (TREE_CODE (ptr) == ADDR_EXPR)
12084 ret = build_int_cstu (size_type_node,
12085 compute_builtin_object_size (ptr, object_size_type));
12087 else if (TREE_CODE (ptr) == SSA_NAME)
12089 unsigned HOST_WIDE_INT bytes;
12091 /* If object size is not known yet, delay folding until
12092 later. Maybe subsequent passes will help determining
12094 bytes = compute_builtin_object_size (ptr, object_size_type);
12095 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12097 ret = build_int_cstu (size_type_node, bytes);
12102 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12103 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12104 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12111 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12112 DEST, SRC, LEN, and SIZE are the arguments to the call.
12113 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12114 code of the builtin. If MAXLEN is not NULL, it is maximum length
12115 passed as third argument. */
12118 fold_builtin_memory_chk (location_t loc, tree fndecl,
12119 tree dest, tree src, tree len, tree size,
12120 tree maxlen, bool ignore,
12121 enum built_in_function fcode)
12125 if (!validate_arg (dest, POINTER_TYPE)
12126 || !validate_arg (src,
12127 (fcode == BUILT_IN_MEMSET_CHK
12128 ? INTEGER_TYPE : POINTER_TYPE))
12129 || !validate_arg (len, INTEGER_TYPE)
12130 || !validate_arg (size, INTEGER_TYPE))
12133 /* If SRC and DEST are the same (and not volatile), return DEST
12134 (resp. DEST+LEN for __mempcpy_chk). */
12135 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12137 if (fcode != BUILT_IN_MEMPCPY_CHK)
12138 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12142 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12144 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12148 if (! host_integerp (size, 1))
12151 if (! integer_all_onesp (size))
12153 if (! host_integerp (len, 1))
12155 /* If LEN is not constant, try MAXLEN too.
12156 For MAXLEN only allow optimizing into non-_ocs function
12157 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12158 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12160 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12162 /* (void) __mempcpy_chk () can be optimized into
12163 (void) __memcpy_chk (). */
12164 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12168 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12176 if (tree_int_cst_lt (size, maxlen))
12181 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12182 mem{cpy,pcpy,move,set} is available. */
12185 case BUILT_IN_MEMCPY_CHK:
12186 fn = built_in_decls[BUILT_IN_MEMCPY];
12188 case BUILT_IN_MEMPCPY_CHK:
12189 fn = built_in_decls[BUILT_IN_MEMPCPY];
12191 case BUILT_IN_MEMMOVE_CHK:
12192 fn = built_in_decls[BUILT_IN_MEMMOVE];
12194 case BUILT_IN_MEMSET_CHK:
12195 fn = built_in_decls[BUILT_IN_MEMSET];
12204 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12207 /* Fold a call to the __st[rp]cpy_chk builtin.
12208 DEST, SRC, and SIZE are the arguments to the call.
12209 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12210 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12211 strings passed as second argument. */
12214 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12215 tree src, tree size,
12216 tree maxlen, bool ignore,
12217 enum built_in_function fcode)
12221 if (!validate_arg (dest, POINTER_TYPE)
12222 || !validate_arg (src, POINTER_TYPE)
12223 || !validate_arg (size, INTEGER_TYPE))
12226 /* If SRC and DEST are the same (and not volatile), return DEST. */
12227 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12228 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12230 if (! host_integerp (size, 1))
12233 if (! integer_all_onesp (size))
12235 len = c_strlen (src, 1);
12236 if (! len || ! host_integerp (len, 1))
12238 /* If LEN is not constant, try MAXLEN too.
12239 For MAXLEN only allow optimizing into non-_ocs function
12240 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12241 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12243 if (fcode == BUILT_IN_STPCPY_CHK)
12248 /* If return value of __stpcpy_chk is ignored,
12249 optimize into __strcpy_chk. */
12250 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12254 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12257 if (! len || TREE_SIDE_EFFECTS (len))
12260 /* If c_strlen returned something, but not a constant,
12261 transform __strcpy_chk into __memcpy_chk. */
12262 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12266 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12267 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12268 build_call_expr_loc (loc, fn, 4,
12269 dest, src, len, size));
12275 if (! tree_int_cst_lt (maxlen, size))
12279 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12280 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12281 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12285 return build_call_expr_loc (loc, fn, 2, dest, src);
12288 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12289 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12290 length passed as third argument. */
12293 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12294 tree len, tree size, tree maxlen)
12298 if (!validate_arg (dest, POINTER_TYPE)
12299 || !validate_arg (src, POINTER_TYPE)
12300 || !validate_arg (len, INTEGER_TYPE)
12301 || !validate_arg (size, INTEGER_TYPE))
12304 if (! host_integerp (size, 1))
12307 if (! integer_all_onesp (size))
12309 if (! host_integerp (len, 1))
12311 /* If LEN is not constant, try MAXLEN too.
12312 For MAXLEN only allow optimizing into non-_ocs function
12313 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12314 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12320 if (tree_int_cst_lt (size, maxlen))
12324 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12325 fn = built_in_decls[BUILT_IN_STRNCPY];
12329 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12332 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12333 are the arguments to the call. */
12336 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12337 tree src, tree size)
12342 if (!validate_arg (dest, POINTER_TYPE)
12343 || !validate_arg (src, POINTER_TYPE)
12344 || !validate_arg (size, INTEGER_TYPE))
12347 p = c_getstr (src);
12348 /* If the SRC parameter is "", return DEST. */
12349 if (p && *p == '\0')
12350 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12352 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12355 /* If __builtin_strcat_chk is used, assume strcat is available. */
12356 fn = built_in_decls[BUILT_IN_STRCAT];
12360 return build_call_expr_loc (loc, fn, 2, dest, src);
12363 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12367 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12368 tree dest, tree src, tree len, tree size)
12373 if (!validate_arg (dest, POINTER_TYPE)
12374 || !validate_arg (src, POINTER_TYPE)
12375 || !validate_arg (size, INTEGER_TYPE)
12376 || !validate_arg (size, INTEGER_TYPE))
12379 p = c_getstr (src);
12380 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12381 if (p && *p == '\0')
12382 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12383 else if (integer_zerop (len))
12384 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12386 if (! host_integerp (size, 1))
12389 if (! integer_all_onesp (size))
12391 tree src_len = c_strlen (src, 1);
12393 && host_integerp (src_len, 1)
12394 && host_integerp (len, 1)
12395 && ! tree_int_cst_lt (len, src_len))
12397 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12398 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12402 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12407 /* If __builtin_strncat_chk is used, assume strncat is available. */
12408 fn = built_in_decls[BUILT_IN_STRNCAT];
12412 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12415 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12416 a normal call should be emitted rather than expanding the function
12417 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12420 fold_builtin_sprintf_chk (location_t loc, tree exp,
12421 enum built_in_function fcode)
12423 tree dest, size, len, fn, fmt, flag;
12424 const char *fmt_str;
12425 int nargs = call_expr_nargs (exp);
12427 /* Verify the required arguments in the original call. */
12430 dest = CALL_EXPR_ARG (exp, 0);
12431 if (!validate_arg (dest, POINTER_TYPE))
12433 flag = CALL_EXPR_ARG (exp, 1);
12434 if (!validate_arg (flag, INTEGER_TYPE))
12436 size = CALL_EXPR_ARG (exp, 2);
12437 if (!validate_arg (size, INTEGER_TYPE))
12439 fmt = CALL_EXPR_ARG (exp, 3);
12440 if (!validate_arg (fmt, POINTER_TYPE))
12443 if (! host_integerp (size, 1))
12448 if (!init_target_chars ())
12451 /* Check whether the format is a literal string constant. */
12452 fmt_str = c_getstr (fmt);
12453 if (fmt_str != NULL)
12455 /* If the format doesn't contain % args or %%, we know the size. */
12456 if (strchr (fmt_str, target_percent) == 0)
12458 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12459 len = build_int_cstu (size_type_node, strlen (fmt_str));
12461 /* If the format is "%s" and first ... argument is a string literal,
12462 we know the size too. */
12463 else if (fcode == BUILT_IN_SPRINTF_CHK
12464 && strcmp (fmt_str, target_percent_s) == 0)
12470 arg = CALL_EXPR_ARG (exp, 4);
12471 if (validate_arg (arg, POINTER_TYPE))
12473 len = c_strlen (arg, 1);
12474 if (! len || ! host_integerp (len, 1))
12481 if (! integer_all_onesp (size))
12483 if (! len || ! tree_int_cst_lt (len, size))
12487 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12488 or if format doesn't contain % chars or is "%s". */
12489 if (! integer_zerop (flag))
12491 if (fmt_str == NULL)
12493 if (strchr (fmt_str, target_percent) != NULL
12494 && strcmp (fmt_str, target_percent_s))
12498 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12499 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12500 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12504 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12507 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12508 a normal call should be emitted rather than expanding the function
12509 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12510 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12511 passed as second argument. */
12514 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12515 enum built_in_function fcode)
12517 tree dest, size, len, fn, fmt, flag;
12518 const char *fmt_str;
12520 /* Verify the required arguments in the original call. */
12521 if (call_expr_nargs (exp) < 5)
12523 dest = CALL_EXPR_ARG (exp, 0);
12524 if (!validate_arg (dest, POINTER_TYPE))
12526 len = CALL_EXPR_ARG (exp, 1);
12527 if (!validate_arg (len, INTEGER_TYPE))
12529 flag = CALL_EXPR_ARG (exp, 2);
12530 if (!validate_arg (flag, INTEGER_TYPE))
12532 size = CALL_EXPR_ARG (exp, 3);
12533 if (!validate_arg (size, INTEGER_TYPE))
12535 fmt = CALL_EXPR_ARG (exp, 4);
12536 if (!validate_arg (fmt, POINTER_TYPE))
12539 if (! host_integerp (size, 1))
12542 if (! integer_all_onesp (size))
12544 if (! host_integerp (len, 1))
12546 /* If LEN is not constant, try MAXLEN too.
12547 For MAXLEN only allow optimizing into non-_ocs function
12548 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12549 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12555 if (tree_int_cst_lt (size, maxlen))
12559 if (!init_target_chars ())
12562 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12563 or if format doesn't contain % chars or is "%s". */
12564 if (! integer_zerop (flag))
12566 fmt_str = c_getstr (fmt);
12567 if (fmt_str == NULL)
12569 if (strchr (fmt_str, target_percent) != NULL
12570 && strcmp (fmt_str, target_percent_s))
12574 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12576 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12577 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12581 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12584 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12585 FMT and ARG are the arguments to the call; we don't fold cases with
12586 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12588 Return NULL_TREE if no simplification was possible, otherwise return the
12589 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12590 code of the function to be simplified. */
12593 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12594 tree arg, bool ignore,
12595 enum built_in_function fcode)
12597 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12598 const char *fmt_str = NULL;
12600 /* If the return value is used, don't do the transformation. */
12604 /* Verify the required arguments in the original call. */
12605 if (!validate_arg (fmt, POINTER_TYPE))
12608 /* Check whether the format is a literal string constant. */
12609 fmt_str = c_getstr (fmt);
12610 if (fmt_str == NULL)
12613 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12615 /* If we're using an unlocked function, assume the other
12616 unlocked functions exist explicitly. */
12617 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12618 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12622 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12623 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12626 if (!init_target_chars ())
12629 if (strcmp (fmt_str, target_percent_s) == 0
12630 || strchr (fmt_str, target_percent) == NULL)
12634 if (strcmp (fmt_str, target_percent_s) == 0)
12636 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12639 if (!arg || !validate_arg (arg, POINTER_TYPE))
12642 str = c_getstr (arg);
12648 /* The format specifier doesn't contain any '%' characters. */
12649 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12655 /* If the string was "", printf does nothing. */
12656 if (str[0] == '\0')
12657 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12659 /* If the string has length of 1, call putchar. */
12660 if (str[1] == '\0')
12662 /* Given printf("c"), (where c is any one character,)
12663 convert "c"[0] to an int and pass that to the replacement
12665 newarg = build_int_cst (NULL_TREE, str[0]);
12667 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12671 /* If the string was "string\n", call puts("string"). */
12672 size_t len = strlen (str);
12673 if ((unsigned char)str[len - 1] == target_newline)
12675 /* Create a NUL-terminated string that's one char shorter
12676 than the original, stripping off the trailing '\n'. */
12677 char *newstr = XALLOCAVEC (char, len);
12678 memcpy (newstr, str, len - 1);
12679 newstr[len - 1] = 0;
12681 newarg = build_string_literal (len, newstr);
12683 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12686 /* We'd like to arrange to call fputs(string,stdout) here,
12687 but we need stdout and don't have a way to get it yet. */
12692 /* The other optimizations can be done only on the non-va_list variants. */
12693 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12696 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12697 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12699 if (!arg || !validate_arg (arg, POINTER_TYPE))
12702 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12705 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12706 else if (strcmp (fmt_str, target_percent_c) == 0)
12708 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12711 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12717 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12720 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12721 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12722 more than 3 arguments, and ARG may be null in the 2-argument case.
12724 Return NULL_TREE if no simplification was possible, otherwise return the
12725 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12726 code of the function to be simplified. */
12729 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12730 tree fmt, tree arg, bool ignore,
12731 enum built_in_function fcode)
12733 tree fn_fputc, fn_fputs, call = NULL_TREE;
12734 const char *fmt_str = NULL;
12736 /* If the return value is used, don't do the transformation. */
12740 /* Verify the required arguments in the original call. */
12741 if (!validate_arg (fp, POINTER_TYPE))
12743 if (!validate_arg (fmt, POINTER_TYPE))
12746 /* Check whether the format is a literal string constant. */
12747 fmt_str = c_getstr (fmt);
12748 if (fmt_str == NULL)
12751 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12753 /* If we're using an unlocked function, assume the other
12754 unlocked functions exist explicitly. */
12755 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12756 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12760 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12761 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12764 if (!init_target_chars ())
12767 /* If the format doesn't contain % args or %%, use strcpy. */
12768 if (strchr (fmt_str, target_percent) == NULL)
12770 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12774 /* If the format specifier was "", fprintf does nothing. */
12775 if (fmt_str[0] == '\0')
12777 /* If FP has side-effects, just wait until gimplification is
12779 if (TREE_SIDE_EFFECTS (fp))
12782 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12785 /* When "string" doesn't contain %, replace all cases of
12786 fprintf (fp, string) with fputs (string, fp). The fputs
12787 builtin will take care of special cases like length == 1. */
12789 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12792 /* The other optimizations can be done only on the non-va_list variants. */
12793 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12796 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12797 else if (strcmp (fmt_str, target_percent_s) == 0)
12799 if (!arg || !validate_arg (arg, POINTER_TYPE))
12802 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12805 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12806 else if (strcmp (fmt_str, target_percent_c) == 0)
12808 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12811 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12816 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12819 /* Initialize format string characters in the target charset. */
12822 init_target_chars (void)
12827 target_newline = lang_hooks.to_target_charset ('\n');
12828 target_percent = lang_hooks.to_target_charset ('%');
12829 target_c = lang_hooks.to_target_charset ('c');
12830 target_s = lang_hooks.to_target_charset ('s');
12831 if (target_newline == 0 || target_percent == 0 || target_c == 0
12835 target_percent_c[0] = target_percent;
12836 target_percent_c[1] = target_c;
12837 target_percent_c[2] = '\0';
12839 target_percent_s[0] = target_percent;
12840 target_percent_s[1] = target_s;
12841 target_percent_s[2] = '\0';
12843 target_percent_s_newline[0] = target_percent;
12844 target_percent_s_newline[1] = target_s;
12845 target_percent_s_newline[2] = target_newline;
12846 target_percent_s_newline[3] = '\0';
12853 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12854 and no overflow/underflow occurred. INEXACT is true if M was not
12855 exactly calculated. TYPE is the tree type for the result. This
12856 function assumes that you cleared the MPFR flags and then
12857 calculated M to see if anything subsequently set a flag prior to
12858 entering this function. Return NULL_TREE if any checks fail. */
12861 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12863 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12864 overflow/underflow occurred. If -frounding-math, proceed iff the
12865 result of calling FUNC was exact. */
12866 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12867 && (!flag_rounding_math || !inexact))
12869 REAL_VALUE_TYPE rr;
12871 real_from_mpfr (&rr, m, type, GMP_RNDN);
12872 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12873 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12874 but the mpft_t is not, then we underflowed in the
12876 if (real_isfinite (&rr)
12877 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12879 REAL_VALUE_TYPE rmode;
12881 real_convert (&rmode, TYPE_MODE (type), &rr);
12882 /* Proceed iff the specified mode can hold the value. */
12883 if (real_identical (&rmode, &rr))
12884 return build_real (type, rmode);
12890 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12891 number and no overflow/underflow occurred. INEXACT is true if M
12892 was not exactly calculated. TYPE is the tree type for the result.
12893 This function assumes that you cleared the MPFR flags and then
12894 calculated M to see if anything subsequently set a flag prior to
12895 entering this function. Return NULL_TREE if any checks fail, if
12896 FORCE_CONVERT is true, then bypass the checks. */
12899 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12901 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12902 overflow/underflow occurred. If -frounding-math, proceed iff the
12903 result of calling FUNC was exact. */
12905 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12906 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12907 && (!flag_rounding_math || !inexact)))
12909 REAL_VALUE_TYPE re, im;
12911 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12912 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12913 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12914 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12915 but the mpft_t is not, then we underflowed in the
12918 || (real_isfinite (&re) && real_isfinite (&im)
12919 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12920 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12922 REAL_VALUE_TYPE re_mode, im_mode;
12924 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12925 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12926 /* Proceed iff the specified mode can hold the value. */
12928 || (real_identical (&re_mode, &re)
12929 && real_identical (&im_mode, &im)))
12930 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12931 build_real (TREE_TYPE (type), im_mode));
12937 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12938 FUNC on it and return the resulting value as a tree with type TYPE.
12939 If MIN and/or MAX are not NULL, then the supplied ARG must be
12940 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12941 acceptable values, otherwise they are not. The mpfr precision is
12942 set to the precision of TYPE. We assume that function FUNC returns
12943 zero if the result could be calculated exactly within the requested
12947 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12948 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12951 tree result = NULL_TREE;
12955 /* To proceed, MPFR must exactly represent the target floating point
12956 format, which only happens when the target base equals two. */
12957 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12958 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12960 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12962 if (real_isfinite (ra)
12963 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12964 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12966 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12967 const int prec = fmt->p;
12968 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12972 mpfr_init2 (m, prec);
12973 mpfr_from_real (m, ra, GMP_RNDN);
12974 mpfr_clear_flags ();
12975 inexact = func (m, m, rnd);
12976 result = do_mpfr_ckconv (m, type, inexact);
12984 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12985 FUNC on it and return the resulting value as a tree with type TYPE.
12986 The mpfr precision is set to the precision of TYPE. We assume that
12987 function FUNC returns zero if the result could be calculated
12988 exactly within the requested precision. */
12991 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12992 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12994 tree result = NULL_TREE;
12999 /* To proceed, MPFR must exactly represent the target floating point
13000 format, which only happens when the target base equals two. */
13001 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13002 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13003 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13005 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13006 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13008 if (real_isfinite (ra1) && real_isfinite (ra2))
13010 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13011 const int prec = fmt->p;
13012 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13016 mpfr_inits2 (prec, m1, m2, NULL);
13017 mpfr_from_real (m1, ra1, GMP_RNDN);
13018 mpfr_from_real (m2, ra2, GMP_RNDN);
13019 mpfr_clear_flags ();
13020 inexact = func (m1, m1, m2, rnd);
13021 result = do_mpfr_ckconv (m1, type, inexact);
13022 mpfr_clears (m1, m2, NULL);
13029 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13030 FUNC on it and return the resulting value as a tree with type TYPE.
13031 The mpfr precision is set to the precision of TYPE. We assume that
13032 function FUNC returns zero if the result could be calculated
13033 exactly within the requested precision. */
13036 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13037 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13039 tree result = NULL_TREE;
13045 /* To proceed, MPFR must exactly represent the target floating point
13046 format, which only happens when the target base equals two. */
13047 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13048 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13049 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13050 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13052 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13053 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13054 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13056 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13058 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13059 const int prec = fmt->p;
13060 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13064 mpfr_inits2 (prec, m1, m2, m3, NULL);
13065 mpfr_from_real (m1, ra1, GMP_RNDN);
13066 mpfr_from_real (m2, ra2, GMP_RNDN);
13067 mpfr_from_real (m3, ra3, GMP_RNDN);
13068 mpfr_clear_flags ();
13069 inexact = func (m1, m1, m2, m3, rnd);
13070 result = do_mpfr_ckconv (m1, type, inexact);
13071 mpfr_clears (m1, m2, m3, NULL);
13078 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13079 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13080 If ARG_SINP and ARG_COSP are NULL then the result is returned
13081 as a complex value.
13082 The type is taken from the type of ARG and is used for setting the
13083 precision of the calculation and results. */
13086 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13088 tree const type = TREE_TYPE (arg);
13089 tree result = NULL_TREE;
13093 /* To proceed, MPFR must exactly represent the target floating point
13094 format, which only happens when the target base equals two. */
13095 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13096 && TREE_CODE (arg) == REAL_CST
13097 && !TREE_OVERFLOW (arg))
13099 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13101 if (real_isfinite (ra))
13103 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13104 const int prec = fmt->p;
13105 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13106 tree result_s, result_c;
13110 mpfr_inits2 (prec, m, ms, mc, NULL);
13111 mpfr_from_real (m, ra, GMP_RNDN);
13112 mpfr_clear_flags ();
13113 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13114 result_s = do_mpfr_ckconv (ms, type, inexact);
13115 result_c = do_mpfr_ckconv (mc, type, inexact);
13116 mpfr_clears (m, ms, mc, NULL);
13117 if (result_s && result_c)
13119 /* If we are to return in a complex value do so. */
13120 if (!arg_sinp && !arg_cosp)
13121 return build_complex (build_complex_type (type),
13122 result_c, result_s);
13124 /* Dereference the sin/cos pointer arguments. */
13125 arg_sinp = build_fold_indirect_ref (arg_sinp);
13126 arg_cosp = build_fold_indirect_ref (arg_cosp);
13127 /* Proceed if valid pointer type were passed in. */
13128 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13129 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13131 /* Set the values. */
13132 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13134 TREE_SIDE_EFFECTS (result_s) = 1;
13135 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13137 TREE_SIDE_EFFECTS (result_c) = 1;
13138 /* Combine the assignments into a compound expr. */
13139 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13140 result_s, result_c));
13148 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13149 two-argument mpfr order N Bessel function FUNC on them and return
13150 the resulting value as a tree with type TYPE. The mpfr precision
13151 is set to the precision of TYPE. We assume that function FUNC
13152 returns zero if the result could be calculated exactly within the
13153 requested precision. */
13155 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13156 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13157 const REAL_VALUE_TYPE *min, bool inclusive)
13159 tree result = NULL_TREE;
13164 /* To proceed, MPFR must exactly represent the target floating point
13165 format, which only happens when the target base equals two. */
13166 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13167 && host_integerp (arg1, 0)
13168 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13170 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13171 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13174 && real_isfinite (ra)
13175 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13177 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13178 const int prec = fmt->p;
13179 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13183 mpfr_init2 (m, prec);
13184 mpfr_from_real (m, ra, GMP_RNDN);
13185 mpfr_clear_flags ();
13186 inexact = func (m, n, m, rnd);
13187 result = do_mpfr_ckconv (m, type, inexact);
13195 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13196 the pointer *(ARG_QUO) and return the result. The type is taken
13197 from the type of ARG0 and is used for setting the precision of the
13198 calculation and results. */
13201 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13203 tree const type = TREE_TYPE (arg0);
13204 tree result = NULL_TREE;
13209 /* To proceed, MPFR must exactly represent the target floating point
13210 format, which only happens when the target base equals two. */
13211 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13212 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13213 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13215 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13216 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13218 if (real_isfinite (ra0) && real_isfinite (ra1))
13220 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13221 const int prec = fmt->p;
13222 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13227 mpfr_inits2 (prec, m0, m1, NULL);
13228 mpfr_from_real (m0, ra0, GMP_RNDN);
13229 mpfr_from_real (m1, ra1, GMP_RNDN);
13230 mpfr_clear_flags ();
13231 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13232 /* Remquo is independent of the rounding mode, so pass
13233 inexact=0 to do_mpfr_ckconv(). */
13234 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13235 mpfr_clears (m0, m1, NULL);
13238 /* MPFR calculates quo in the host's long so it may
13239 return more bits in quo than the target int can hold
13240 if sizeof(host long) > sizeof(target int). This can
13241 happen even for native compilers in LP64 mode. In
13242 these cases, modulo the quo value with the largest
13243 number that the target int can hold while leaving one
13244 bit for the sign. */
13245 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13246 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13248 /* Dereference the quo pointer argument. */
13249 arg_quo = build_fold_indirect_ref (arg_quo);
13250 /* Proceed iff a valid pointer type was passed in. */
13251 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13253 /* Set the value. */
13254 tree result_quo = fold_build2 (MODIFY_EXPR,
13255 TREE_TYPE (arg_quo), arg_quo,
13256 build_int_cst (NULL, integer_quo));
13257 TREE_SIDE_EFFECTS (result_quo) = 1;
13258 /* Combine the quo assignment with the rem. */
13259 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13260 result_quo, result_rem));
13268 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13269 resulting value as a tree with type TYPE. The mpfr precision is
13270 set to the precision of TYPE. We assume that this mpfr function
13271 returns zero if the result could be calculated exactly within the
13272 requested precision. In addition, the integer pointer represented
13273 by ARG_SG will be dereferenced and set to the appropriate signgam
13277 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13279 tree result = NULL_TREE;
13283 /* To proceed, MPFR must exactly represent the target floating point
13284 format, which only happens when the target base equals two. Also
13285 verify ARG is a constant and that ARG_SG is an int pointer. */
13286 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13287 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13288 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13289 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13291 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13293 /* In addition to NaN and Inf, the argument cannot be zero or a
13294 negative integer. */
13295 if (real_isfinite (ra)
13296 && ra->cl != rvc_zero
13297 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13299 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13300 const int prec = fmt->p;
13301 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13306 mpfr_init2 (m, prec);
13307 mpfr_from_real (m, ra, GMP_RNDN);
13308 mpfr_clear_flags ();
13309 inexact = mpfr_lgamma (m, &sg, m, rnd);
13310 result_lg = do_mpfr_ckconv (m, type, inexact);
13316 /* Dereference the arg_sg pointer argument. */
13317 arg_sg = build_fold_indirect_ref (arg_sg);
13318 /* Assign the signgam value into *arg_sg. */
13319 result_sg = fold_build2 (MODIFY_EXPR,
13320 TREE_TYPE (arg_sg), arg_sg,
13321 build_int_cst (NULL, sg));
13322 TREE_SIDE_EFFECTS (result_sg) = 1;
13323 /* Combine the signgam assignment with the lgamma result. */
13324 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13325 result_sg, result_lg));
13333 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13334 function FUNC on it and return the resulting value as a tree with
13335 type TYPE. The mpfr precision is set to the precision of TYPE. We
13336 assume that function FUNC returns zero if the result could be
13337 calculated exactly within the requested precision. */
13340 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13342 tree result = NULL_TREE;
13346 /* To proceed, MPFR must exactly represent the target floating point
13347 format, which only happens when the target base equals two. */
13348 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13349 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13350 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13352 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13353 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13355 if (real_isfinite (re) && real_isfinite (im))
13357 const struct real_format *const fmt =
13358 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13359 const int prec = fmt->p;
13360 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13361 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13365 mpc_init2 (m, prec);
13366 mpfr_from_real (mpc_realref(m), re, rnd);
13367 mpfr_from_real (mpc_imagref(m), im, rnd);
13368 mpfr_clear_flags ();
13369 inexact = func (m, m, crnd);
13370 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13378 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13379 mpc function FUNC on it and return the resulting value as a tree
13380 with type TYPE. The mpfr precision is set to the precision of
13381 TYPE. We assume that function FUNC returns zero if the result
13382 could be calculated exactly within the requested precision. If
13383 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13384 in the arguments and/or results. */
13387 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13388 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13390 tree result = NULL_TREE;
13395 /* To proceed, MPFR must exactly represent the target floating point
13396 format, which only happens when the target base equals two. */
13397 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13398 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13399 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13400 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13401 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13403 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13404 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13405 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13406 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13409 || (real_isfinite (re0) && real_isfinite (im0)
13410 && real_isfinite (re1) && real_isfinite (im1)))
13412 const struct real_format *const fmt =
13413 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13414 const int prec = fmt->p;
13415 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13416 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13420 mpc_init2 (m0, prec);
13421 mpc_init2 (m1, prec);
13422 mpfr_from_real (mpc_realref(m0), re0, rnd);
13423 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13424 mpfr_from_real (mpc_realref(m1), re1, rnd);
13425 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13426 mpfr_clear_flags ();
13427 inexact = func (m0, m0, m1, crnd);
13428 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13438 The functions below provide an alternate interface for folding
13439 builtin function calls presented as GIMPLE_CALL statements rather
13440 than as CALL_EXPRs. The folded result is still expressed as a
13441 tree. There is too much code duplication in the handling of
13442 varargs functions, and a more intrusive re-factoring would permit
13443 better sharing of code between the tree and statement-based
13444 versions of these functions. */
13446 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13447 along with N new arguments specified as the "..." parameters. SKIP
13448 is the number of arguments in STMT to be omitted. This function is used
13449 to do varargs-to-varargs transformations. */
13452 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13454 int oldnargs = gimple_call_num_args (stmt);
13455 int nargs = oldnargs - skip + n;
13456 tree fntype = TREE_TYPE (fndecl);
13457 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13461 location_t loc = gimple_location (stmt);
13463 buffer = XALLOCAVEC (tree, nargs);
13465 for (i = 0; i < n; i++)
13466 buffer[i] = va_arg (ap, tree);
13468 for (j = skip; j < oldnargs; j++, i++)
13469 buffer[i] = gimple_call_arg (stmt, j);
13471 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13474 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13475 a normal call should be emitted rather than expanding the function
13476 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13479 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13481 tree dest, size, len, fn, fmt, flag;
13482 const char *fmt_str;
13483 int nargs = gimple_call_num_args (stmt);
13485 /* Verify the required arguments in the original call. */
13488 dest = gimple_call_arg (stmt, 0);
13489 if (!validate_arg (dest, POINTER_TYPE))
13491 flag = gimple_call_arg (stmt, 1);
13492 if (!validate_arg (flag, INTEGER_TYPE))
13494 size = gimple_call_arg (stmt, 2);
13495 if (!validate_arg (size, INTEGER_TYPE))
13497 fmt = gimple_call_arg (stmt, 3);
13498 if (!validate_arg (fmt, POINTER_TYPE))
13501 if (! host_integerp (size, 1))
13506 if (!init_target_chars ())
13509 /* Check whether the format is a literal string constant. */
13510 fmt_str = c_getstr (fmt);
13511 if (fmt_str != NULL)
13513 /* If the format doesn't contain % args or %%, we know the size. */
13514 if (strchr (fmt_str, target_percent) == 0)
13516 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13517 len = build_int_cstu (size_type_node, strlen (fmt_str));
13519 /* If the format is "%s" and first ... argument is a string literal,
13520 we know the size too. */
13521 else if (fcode == BUILT_IN_SPRINTF_CHK
13522 && strcmp (fmt_str, target_percent_s) == 0)
13528 arg = gimple_call_arg (stmt, 4);
13529 if (validate_arg (arg, POINTER_TYPE))
13531 len = c_strlen (arg, 1);
13532 if (! len || ! host_integerp (len, 1))
13539 if (! integer_all_onesp (size))
13541 if (! len || ! tree_int_cst_lt (len, size))
13545 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13546 or if format doesn't contain % chars or is "%s". */
13547 if (! integer_zerop (flag))
13549 if (fmt_str == NULL)
13551 if (strchr (fmt_str, target_percent) != NULL
13552 && strcmp (fmt_str, target_percent_s))
13556 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13557 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13558 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13562 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13565 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13566 a normal call should be emitted rather than expanding the function
13567 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13568 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13569 passed as second argument. */
13572 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13573 enum built_in_function fcode)
13575 tree dest, size, len, fn, fmt, flag;
13576 const char *fmt_str;
13578 /* Verify the required arguments in the original call. */
13579 if (gimple_call_num_args (stmt) < 5)
13581 dest = gimple_call_arg (stmt, 0);
13582 if (!validate_arg (dest, POINTER_TYPE))
13584 len = gimple_call_arg (stmt, 1);
13585 if (!validate_arg (len, INTEGER_TYPE))
13587 flag = gimple_call_arg (stmt, 2);
13588 if (!validate_arg (flag, INTEGER_TYPE))
13590 size = gimple_call_arg (stmt, 3);
13591 if (!validate_arg (size, INTEGER_TYPE))
13593 fmt = gimple_call_arg (stmt, 4);
13594 if (!validate_arg (fmt, POINTER_TYPE))
13597 if (! host_integerp (size, 1))
13600 if (! integer_all_onesp (size))
13602 if (! host_integerp (len, 1))
13604 /* If LEN is not constant, try MAXLEN too.
13605 For MAXLEN only allow optimizing into non-_ocs function
13606 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13607 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13613 if (tree_int_cst_lt (size, maxlen))
13617 if (!init_target_chars ())
13620 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13621 or if format doesn't contain % chars or is "%s". */
13622 if (! integer_zerop (flag))
13624 fmt_str = c_getstr (fmt);
13625 if (fmt_str == NULL)
13627 if (strchr (fmt_str, target_percent) != NULL
13628 && strcmp (fmt_str, target_percent_s))
13632 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13634 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13635 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13639 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13642 /* Builtins with folding operations that operate on "..." arguments
13643 need special handling; we need to store the arguments in a convenient
13644 data structure before attempting any folding. Fortunately there are
13645 only a few builtins that fall into this category. FNDECL is the
13646 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13647 result of the function call is ignored. */
13650 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13651 bool ignore ATTRIBUTE_UNUSED)
13653 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13654 tree ret = NULL_TREE;
13658 case BUILT_IN_SPRINTF_CHK:
13659 case BUILT_IN_VSPRINTF_CHK:
13660 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13663 case BUILT_IN_SNPRINTF_CHK:
13664 case BUILT_IN_VSNPRINTF_CHK:
13665 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13672 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13673 TREE_NO_WARNING (ret) = 1;
13679 /* A wrapper function for builtin folding that prevents warnings for
13680 "statement without effect" and the like, caused by removing the
13681 call node earlier than the warning is generated. */
13684 fold_call_stmt (gimple stmt, bool ignore)
13686 tree ret = NULL_TREE;
13687 tree fndecl = gimple_call_fndecl (stmt);
13688 location_t loc = gimple_location (stmt);
13690 && TREE_CODE (fndecl) == FUNCTION_DECL
13691 && DECL_BUILT_IN (fndecl)
13692 && !gimple_call_va_arg_pack_p (stmt))
13694 int nargs = gimple_call_num_args (stmt);
13696 if (avoid_folding_inline_builtin (fndecl))
13698 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13700 return targetm.fold_builtin (fndecl, nargs,
13702 ? gimple_call_arg_ptr (stmt, 0)
13703 : &error_mark_node), ignore);
13707 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13709 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13711 for (i = 0; i < nargs; i++)
13712 args[i] = gimple_call_arg (stmt, i);
13713 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13716 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13719 /* Propagate location information from original call to
13720 expansion of builtin. Otherwise things like
13721 maybe_emit_chk_warning, that operate on the expansion
13722 of a builtin, will use the wrong location information. */
13723 if (gimple_has_location (stmt))
13725 tree realret = ret;
13726 if (TREE_CODE (ret) == NOP_EXPR)
13727 realret = TREE_OPERAND (ret, 0);
13728 if (CAN_HAVE_LOCATION_P (realret)
13729 && !EXPR_HAS_LOCATION (realret))
13730 SET_EXPR_LOCATION (realret, loc);
13740 /* Look up the function in built_in_decls that corresponds to DECL
13741 and set ASMSPEC as its user assembler name. DECL must be a
13742 function decl that declares a builtin. */
13745 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13748 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13749 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13752 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13753 set_user_assembler_name (builtin, asmspec);
13754 switch (DECL_FUNCTION_CODE (decl))
13756 case BUILT_IN_MEMCPY:
13757 init_block_move_fn (asmspec);
13758 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13760 case BUILT_IN_MEMSET:
13761 init_block_clear_fn (asmspec);
13762 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13764 case BUILT_IN_MEMMOVE:
13765 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13767 case BUILT_IN_MEMCMP:
13768 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13770 case BUILT_IN_ABORT:
13771 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13774 if (INT_TYPE_SIZE < BITS_PER_WORD)
13776 set_user_assembler_libfunc ("ffs", asmspec);
13777 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13778 MODE_INT, 0), "ffs");