1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic-core.h"
55 #ifndef SLOW_UNALIGNED_ACCESS
56 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
62 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
64 struct target_builtins default_target_builtins;
66 struct target_builtins *this_target_builtins = &default_target_builtins;
69 /* Define the names of the builtin function types and codes. */
70 const char *const built_in_class_names[4]
71 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
73 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
74 const char * built_in_names[(int) END_BUILTINS] =
76 #include "builtins.def"
80 /* Setup an array of _DECL trees, make sure each element is
81 initialized to NULL_TREE. */
82 tree built_in_decls[(int) END_BUILTINS];
83 /* Declarations used when constructing the builtin implicitly in the compiler.
84 It may be NULL_TREE when this is invalid (for instance runtime is not
85 required to implement the function call in all cases). */
86 tree implicit_built_in_decls[(int) END_BUILTINS];
88 static const char *c_getstr (tree);
89 static rtx c_readstr (const char *, enum machine_mode);
90 static int target_char_cast (tree, char *);
91 static rtx get_memory_rtx (tree, tree);
92 static int apply_args_size (void);
93 static int apply_result_size (void);
94 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
95 static rtx result_vector (int, rtx);
97 static void expand_builtin_update_setjmp_buf (rtx);
98 static void expand_builtin_prefetch (tree);
99 static rtx expand_builtin_apply_args (void);
100 static rtx expand_builtin_apply_args_1 (void);
101 static rtx expand_builtin_apply (rtx, rtx, rtx);
102 static void expand_builtin_return (rtx);
103 static enum type_class type_to_class (tree);
104 static rtx expand_builtin_classify_type (tree);
105 static void expand_errno_check (tree, rtx);
106 static rtx expand_builtin_mathfn (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
109 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
110 static rtx expand_builtin_sincos (tree);
111 static rtx expand_builtin_cexpi (tree, rtx, rtx);
112 static rtx expand_builtin_int_roundingfn (tree, rtx);
113 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
114 static rtx expand_builtin_next_arg (void);
115 static rtx expand_builtin_va_start (tree);
116 static rtx expand_builtin_va_end (tree);
117 static rtx expand_builtin_va_copy (tree);
118 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_strcmp (tree, rtx);
120 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
121 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
122 static rtx expand_builtin_memcpy (tree, rtx);
123 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
125 enum machine_mode, int);
126 static rtx expand_builtin_strcpy (tree, rtx);
127 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
128 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_strncpy (tree, rtx);
130 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
131 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
133 static rtx expand_builtin_bzero (tree);
134 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_alloca (tree, rtx, bool);
136 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
137 static rtx expand_builtin_frame_address (tree, tree);
138 static tree stabilize_va_list_loc (location_t, tree, int);
139 static rtx expand_builtin_expect (tree, rtx);
140 static tree fold_builtin_constant_p (tree);
141 static tree fold_builtin_expect (location_t, tree, tree);
142 static tree fold_builtin_classify_type (tree);
143 static tree fold_builtin_strlen (location_t, tree, tree);
144 static tree fold_builtin_inf (location_t, tree, int);
145 static tree fold_builtin_nan (tree, tree, int);
146 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
147 static bool validate_arg (const_tree, enum tree_code code);
148 static bool integer_valued_real_p (tree);
149 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
150 static bool readonly_data_expr (tree);
151 static rtx expand_builtin_fabs (tree, rtx, rtx);
152 static rtx expand_builtin_signbit (tree, rtx);
153 static tree fold_builtin_sqrt (location_t, tree, tree);
154 static tree fold_builtin_cbrt (location_t, tree, tree);
155 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
156 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
157 static tree fold_builtin_cos (location_t, tree, tree, tree);
158 static tree fold_builtin_cosh (location_t, tree, tree, tree);
159 static tree fold_builtin_tan (tree, tree);
160 static tree fold_builtin_trunc (location_t, tree, tree);
161 static tree fold_builtin_floor (location_t, tree, tree);
162 static tree fold_builtin_ceil (location_t, tree, tree);
163 static tree fold_builtin_round (location_t, tree, tree);
164 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
165 static tree fold_builtin_bitop (tree, tree);
166 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
167 static tree fold_builtin_strchr (location_t, tree, tree, tree);
168 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
170 static tree fold_builtin_strcmp (location_t, tree, tree);
171 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
172 static tree fold_builtin_signbit (location_t, tree, tree);
173 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
174 static tree fold_builtin_isascii (location_t, tree);
175 static tree fold_builtin_toascii (location_t, tree);
176 static tree fold_builtin_isdigit (location_t, tree);
177 static tree fold_builtin_fabs (location_t, tree, tree);
178 static tree fold_builtin_abs (location_t, tree, tree);
179 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
181 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
182 static tree fold_builtin_0 (location_t, tree, bool);
183 static tree fold_builtin_1 (location_t, tree, tree, bool);
184 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
185 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
186 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
187 static tree fold_builtin_varargs (location_t, tree, tree, bool);
189 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
190 static tree fold_builtin_strstr (location_t, tree, tree, tree);
191 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
192 static tree fold_builtin_strcat (location_t, tree, tree);
193 static tree fold_builtin_strncat (location_t, tree, tree, tree);
194 static tree fold_builtin_strspn (location_t, tree, tree);
195 static tree fold_builtin_strcspn (location_t, tree, tree);
196 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
198 static rtx expand_builtin_object_size (tree);
199 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
200 enum built_in_function);
201 static void maybe_emit_chk_warning (tree, enum built_in_function);
202 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
203 static void maybe_emit_free_warning (tree);
204 static tree fold_builtin_object_size (tree, tree);
205 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
206 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
207 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
208 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
209 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
210 enum built_in_function);
211 static bool init_target_chars (void);
213 static unsigned HOST_WIDE_INT target_newline;
214 static unsigned HOST_WIDE_INT target_percent;
215 static unsigned HOST_WIDE_INT target_c;
216 static unsigned HOST_WIDE_INT target_s;
217 static char target_percent_c[3];
218 static char target_percent_s[3];
219 static char target_percent_s_newline[4];
220 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
221 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
222 static tree do_mpfr_arg2 (tree, tree, tree,
223 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
224 static tree do_mpfr_arg3 (tree, tree, tree, tree,
225 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
226 static tree do_mpfr_sincos (tree, tree, tree);
227 static tree do_mpfr_bessel_n (tree, tree, tree,
228 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
229 const REAL_VALUE_TYPE *, bool);
230 static tree do_mpfr_remquo (tree, tree, tree);
231 static tree do_mpfr_lgamma_r (tree, tree, tree);
233 /* Return true if NAME starts with __builtin_ or __sync_. */
236 is_builtin_name (const char *name)
238 if (strncmp (name, "__builtin_", 10) == 0)
240 if (strncmp (name, "__sync_", 7) == 0)
246 /* Return true if DECL is a function symbol representing a built-in. */
249 is_builtin_fn (tree decl)
251 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
255 /* Return true if NODE should be considered for inline expansion regardless
256 of the optimization level. This means whenever a function is invoked with
257 its "internal" name, which normally contains the prefix "__builtin". */
260 called_as_built_in (tree node)
262 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
263 we want the name used to call the function, not the name it
265 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
266 return is_builtin_name (name);
269 /* Return the alignment in bits of EXP, an object.
270 Don't return more than MAX_ALIGN no matter what. */
273 get_object_alignment (tree exp, unsigned int max_align)
275 HOST_WIDE_INT bitsize, bitpos;
277 enum machine_mode mode;
278 int unsignedp, volatilep;
279 unsigned int align, inner;
281 /* Get the innermost object and the constant (bitpos) and possibly
282 variable (offset) offset of the access. */
283 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
284 &mode, &unsignedp, &volatilep, true);
286 /* Extract alignment information from the innermost object and
287 possibly adjust bitpos and offset. */
288 if (TREE_CODE (exp) == CONST_DECL)
289 exp = DECL_INITIAL (exp);
291 && TREE_CODE (exp) != LABEL_DECL)
292 align = DECL_ALIGN (exp);
293 else if (CONSTANT_CLASS_P (exp))
295 align = TYPE_ALIGN (TREE_TYPE (exp));
296 #ifdef CONSTANT_ALIGNMENT
297 align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
300 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
301 align = TYPE_ALIGN (TREE_TYPE (exp));
302 else if (TREE_CODE (exp) == INDIRECT_REF)
303 align = TYPE_ALIGN (TREE_TYPE (exp));
304 else if (TREE_CODE (exp) == MISALIGNED_INDIRECT_REF)
306 tree op1 = TREE_OPERAND (exp, 1);
307 align = integer_zerop (op1) ? BITS_PER_UNIT : TREE_INT_CST_LOW (op1);
309 else if (TREE_CODE (exp) == MEM_REF)
311 tree addr = TREE_OPERAND (exp, 0);
312 struct ptr_info_def *pi;
313 if (TREE_CODE (addr) == BIT_AND_EXPR
314 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
316 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
317 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
318 align *= BITS_PER_UNIT;
319 addr = TREE_OPERAND (addr, 0);
322 align = BITS_PER_UNIT;
323 if (TREE_CODE (addr) == SSA_NAME
324 && (pi = SSA_NAME_PTR_INFO (addr)))
326 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
327 align = MAX (pi->align * BITS_PER_UNIT, align);
329 else if (TREE_CODE (addr) == ADDR_EXPR)
330 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
332 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
334 else if (TREE_CODE (exp) == TARGET_MEM_REF
336 && POINTER_TYPE_P (TREE_TYPE (TMR_BASE (exp))))
338 struct ptr_info_def *pi;
339 tree addr = TMR_BASE (exp);
340 if (TREE_CODE (addr) == BIT_AND_EXPR
341 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
343 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
344 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
345 align *= BITS_PER_UNIT;
346 addr = TREE_OPERAND (addr, 0);
349 align = BITS_PER_UNIT;
350 if (TREE_CODE (addr) == SSA_NAME
351 && (pi = SSA_NAME_PTR_INFO (addr)))
353 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
354 align = MAX (pi->align * BITS_PER_UNIT, align);
356 else if (TREE_CODE (addr) == ADDR_EXPR)
357 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
359 if (TMR_OFFSET (exp))
360 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
361 if (TMR_INDEX (exp) && TMR_STEP (exp))
363 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
364 align = MIN (align, (step & -step) * BITS_PER_UNIT);
366 else if (TMR_INDEX (exp))
367 align = BITS_PER_UNIT;
369 else if (TREE_CODE (exp) == TARGET_MEM_REF
372 align = get_object_alignment (TMR_SYMBOL (exp), max_align);
373 if (TMR_OFFSET (exp))
374 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
375 if (TMR_INDEX (exp) && TMR_STEP (exp))
377 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
378 align = MIN (align, (step & -step) * BITS_PER_UNIT);
380 else if (TMR_INDEX (exp))
381 align = BITS_PER_UNIT;
384 align = BITS_PER_UNIT;
386 /* If there is a non-constant offset part extract the maximum
387 alignment that can prevail. */
393 if (TREE_CODE (offset) == PLUS_EXPR)
395 next_offset = TREE_OPERAND (offset, 0);
396 offset = TREE_OPERAND (offset, 1);
400 if (host_integerp (offset, 1))
402 /* Any overflow in calculating offset_bits won't change
405 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
408 inner = MIN (inner, (offset_bits & -offset_bits));
410 else if (TREE_CODE (offset) == MULT_EXPR
411 && host_integerp (TREE_OPERAND (offset, 1), 1))
413 /* Any overflow in calculating offset_factor won't change
415 unsigned offset_factor
416 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
420 inner = MIN (inner, (offset_factor & -offset_factor));
424 inner = MIN (inner, BITS_PER_UNIT);
427 offset = next_offset;
430 /* Alignment is innermost object alignment adjusted by the constant
431 and non-constant offset parts. */
432 align = MIN (align, inner);
433 bitpos = bitpos & (align - 1);
435 /* align and bitpos now specify known low bits of the pointer.
436 ptr & (align - 1) == bitpos. */
439 align = (bitpos & -bitpos);
441 return MIN (align, max_align);
444 /* Returns true iff we can trust that alignment information has been
445 calculated properly. */
448 can_trust_pointer_alignment (void)
450 /* We rely on TER to compute accurate alignment information. */
451 return (optimize && flag_tree_ter);
454 /* Return the alignment in bits of EXP, a pointer valued expression.
455 But don't return more than MAX_ALIGN no matter what.
456 The alignment returned is, by default, the alignment of the thing that
457 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
459 Otherwise, look at the expression to see if we can do better, i.e., if the
460 expression is actually pointing at an object whose alignment is tighter. */
463 get_pointer_alignment (tree exp, unsigned int max_align)
467 if (TREE_CODE (exp) == ADDR_EXPR)
468 return get_object_alignment (TREE_OPERAND (exp, 0), max_align);
469 else if (TREE_CODE (exp) == SSA_NAME
470 && POINTER_TYPE_P (TREE_TYPE (exp)))
472 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
475 return BITS_PER_UNIT;
476 if (pi->misalign != 0)
477 align = (pi->misalign & -pi->misalign);
480 return MIN (max_align, align * BITS_PER_UNIT);
483 return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
486 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
487 way, because it could contain a zero byte in the middle.
488 TREE_STRING_LENGTH is the size of the character array, not the string.
490 ONLY_VALUE should be nonzero if the result is not going to be emitted
491 into the instruction stream and zero if it is going to be expanded.
492 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
493 is returned, otherwise NULL, since
494 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
495 evaluate the side-effects.
497 The value returned is of type `ssizetype'.
499 Unfortunately, string_constant can't access the values of const char
500 arrays with initializers, so neither can we do so here. */
503 c_strlen (tree src, int only_value)
506 HOST_WIDE_INT offset;
512 if (TREE_CODE (src) == COND_EXPR
513 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
517 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
518 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
519 if (tree_int_cst_equal (len1, len2))
523 if (TREE_CODE (src) == COMPOUND_EXPR
524 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
525 return c_strlen (TREE_OPERAND (src, 1), only_value);
527 if (EXPR_HAS_LOCATION (src))
528 loc = EXPR_LOCATION (src);
530 loc = input_location;
532 src = string_constant (src, &offset_node);
536 max = TREE_STRING_LENGTH (src) - 1;
537 ptr = TREE_STRING_POINTER (src);
539 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
541 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
542 compute the offset to the following null if we don't know where to
543 start searching for it. */
546 for (i = 0; i < max; i++)
550 /* We don't know the starting offset, but we do know that the string
551 has no internal zero bytes. We can assume that the offset falls
552 within the bounds of the string; otherwise, the programmer deserves
553 what he gets. Subtract the offset from the length of the string,
554 and return that. This would perhaps not be valid if we were dealing
555 with named arrays in addition to literal string constants. */
557 return size_diffop_loc (loc, size_int (max), offset_node);
560 /* We have a known offset into the string. Start searching there for
561 a null character if we can represent it as a single HOST_WIDE_INT. */
562 if (offset_node == 0)
564 else if (! host_integerp (offset_node, 0))
567 offset = tree_low_cst (offset_node, 0);
569 /* If the offset is known to be out of bounds, warn, and call strlen at
571 if (offset < 0 || offset > max)
573 /* Suppress multiple warnings for propagated constant strings. */
574 if (! TREE_NO_WARNING (src))
576 warning_at (loc, 0, "offset outside bounds of constant string");
577 TREE_NO_WARNING (src) = 1;
582 /* Use strlen to search for the first zero byte. Since any strings
583 constructed with build_string will have nulls appended, we win even
584 if we get handed something like (char[4])"abcd".
586 Since OFFSET is our starting index into the string, no further
587 calculation is needed. */
588 return ssize_int (strlen (ptr + offset));
591 /* Return a char pointer for a C string if it is a string constant
592 or sum of string constant and integer constant. */
599 src = string_constant (src, &offset_node);
603 if (offset_node == 0)
604 return TREE_STRING_POINTER (src);
605 else if (!host_integerp (offset_node, 1)
606 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
609 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
612 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
613 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
616 c_readstr (const char *str, enum machine_mode mode)
622 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
627 for (i = 0; i < GET_MODE_SIZE (mode); i++)
630 if (WORDS_BIG_ENDIAN)
631 j = GET_MODE_SIZE (mode) - i - 1;
632 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
633 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
634 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
636 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
639 ch = (unsigned char) str[i];
640 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
642 return immed_double_const (c[0], c[1], mode);
645 /* Cast a target constant CST to target CHAR and if that value fits into
646 host char type, return zero and put that value into variable pointed to by
650 target_char_cast (tree cst, char *p)
652 unsigned HOST_WIDE_INT val, hostval;
654 if (!host_integerp (cst, 1)
655 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
658 val = tree_low_cst (cst, 1);
659 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
660 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
663 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
664 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
673 /* Similar to save_expr, but assumes that arbitrary code is not executed
674 in between the multiple evaluations. In particular, we assume that a
675 non-addressable local variable will not be modified. */
678 builtin_save_expr (tree exp)
680 if (TREE_ADDRESSABLE (exp) == 0
681 && (TREE_CODE (exp) == PARM_DECL
682 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
685 return save_expr (exp);
688 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
689 times to get the address of either a higher stack frame, or a return
690 address located within it (depending on FNDECL_CODE). */
693 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
697 #ifdef INITIAL_FRAME_ADDRESS_RTX
698 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
702 /* For a zero count with __builtin_return_address, we don't care what
703 frame address we return, because target-specific definitions will
704 override us. Therefore frame pointer elimination is OK, and using
705 the soft frame pointer is OK.
707 For a nonzero count, or a zero count with __builtin_frame_address,
708 we require a stable offset from the current frame pointer to the
709 previous one, so we must use the hard frame pointer, and
710 we must disable frame pointer elimination. */
711 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
712 tem = frame_pointer_rtx;
715 tem = hard_frame_pointer_rtx;
717 /* Tell reload not to eliminate the frame pointer. */
718 crtl->accesses_prior_frames = 1;
722 /* Some machines need special handling before we can access
723 arbitrary frames. For example, on the SPARC, we must first flush
724 all register windows to the stack. */
725 #ifdef SETUP_FRAME_ADDRESSES
727 SETUP_FRAME_ADDRESSES ();
730 /* On the SPARC, the return address is not in the frame, it is in a
731 register. There is no way to access it off of the current frame
732 pointer, but it can be accessed off the previous frame pointer by
733 reading the value from the register window save area. */
734 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
735 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
739 /* Scan back COUNT frames to the specified frame. */
740 for (i = 0; i < count; i++)
742 /* Assume the dynamic chain pointer is in the word that the
743 frame address points to, unless otherwise specified. */
744 #ifdef DYNAMIC_CHAIN_ADDRESS
745 tem = DYNAMIC_CHAIN_ADDRESS (tem);
747 tem = memory_address (Pmode, tem);
748 tem = gen_frame_mem (Pmode, tem);
749 tem = copy_to_reg (tem);
752 /* For __builtin_frame_address, return what we've got. But, on
753 the SPARC for example, we may have to add a bias. */
754 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
755 #ifdef FRAME_ADDR_RTX
756 return FRAME_ADDR_RTX (tem);
761 /* For __builtin_return_address, get the return address from that frame. */
762 #ifdef RETURN_ADDR_RTX
763 tem = RETURN_ADDR_RTX (count, tem);
765 tem = memory_address (Pmode,
766 plus_constant (tem, GET_MODE_SIZE (Pmode)));
767 tem = gen_frame_mem (Pmode, tem);
772 /* Alias set used for setjmp buffer. */
773 static alias_set_type setjmp_alias_set = -1;
775 /* Construct the leading half of a __builtin_setjmp call. Control will
776 return to RECEIVER_LABEL. This is also called directly by the SJLJ
777 exception handling code. */
780 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
782 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
786 if (setjmp_alias_set == -1)
787 setjmp_alias_set = new_alias_set ();
789 buf_addr = convert_memory_address (Pmode, buf_addr);
791 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
793 /* We store the frame pointer and the address of receiver_label in
794 the buffer and use the rest of it for the stack save area, which
795 is machine-dependent. */
797 mem = gen_rtx_MEM (Pmode, buf_addr);
798 set_mem_alias_set (mem, setjmp_alias_set);
799 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
801 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
802 set_mem_alias_set (mem, setjmp_alias_set);
804 emit_move_insn (validize_mem (mem),
805 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
807 stack_save = gen_rtx_MEM (sa_mode,
808 plus_constant (buf_addr,
809 2 * GET_MODE_SIZE (Pmode)));
810 set_mem_alias_set (stack_save, setjmp_alias_set);
811 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
813 /* If there is further processing to do, do it. */
814 #ifdef HAVE_builtin_setjmp_setup
815 if (HAVE_builtin_setjmp_setup)
816 emit_insn (gen_builtin_setjmp_setup (buf_addr));
819 /* Tell optimize_save_area_alloca that extra work is going to
820 need to go on during alloca. */
821 cfun->calls_setjmp = 1;
823 /* We have a nonlocal label. */
824 cfun->has_nonlocal_label = 1;
827 /* Construct the trailing part of a __builtin_setjmp call. This is
828 also called directly by the SJLJ exception handling code. */
831 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
835 /* Clobber the FP when we get here, so we have to make sure it's
836 marked as used by this function. */
837 emit_use (hard_frame_pointer_rtx);
839 /* Mark the static chain as clobbered here so life information
840 doesn't get messed up for it. */
841 chain = targetm.calls.static_chain (current_function_decl, true);
842 if (chain && REG_P (chain))
843 emit_clobber (chain);
845 /* Now put in the code to restore the frame pointer, and argument
846 pointer, if needed. */
847 #ifdef HAVE_nonlocal_goto
848 if (! HAVE_nonlocal_goto)
851 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
852 /* This might change the hard frame pointer in ways that aren't
853 apparent to early optimization passes, so force a clobber. */
854 emit_clobber (hard_frame_pointer_rtx);
857 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
858 if (fixed_regs[ARG_POINTER_REGNUM])
860 #ifdef ELIMINABLE_REGS
862 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
864 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
865 if (elim_regs[i].from == ARG_POINTER_REGNUM
866 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
869 if (i == ARRAY_SIZE (elim_regs))
872 /* Now restore our arg pointer from the address at which it
873 was saved in our stack frame. */
874 emit_move_insn (crtl->args.internal_arg_pointer,
875 copy_to_reg (get_arg_pointer_save_area ()));
880 #ifdef HAVE_builtin_setjmp_receiver
881 if (HAVE_builtin_setjmp_receiver)
882 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
885 #ifdef HAVE_nonlocal_goto_receiver
886 if (HAVE_nonlocal_goto_receiver)
887 emit_insn (gen_nonlocal_goto_receiver ());
892 /* We must not allow the code we just generated to be reordered by
893 scheduling. Specifically, the update of the frame pointer must
894 happen immediately, not later. */
895 emit_insn (gen_blockage ());
898 /* __builtin_longjmp is passed a pointer to an array of five words (not
899 all will be used on all machines). It operates similarly to the C
900 library function of the same name, but is more efficient. Much of
901 the code below is copied from the handling of non-local gotos. */
904 expand_builtin_longjmp (rtx buf_addr, rtx value)
906 rtx fp, lab, stack, insn, last;
907 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
909 /* DRAP is needed for stack realign if longjmp is expanded to current
911 if (SUPPORTS_STACK_ALIGNMENT)
912 crtl->need_drap = true;
914 if (setjmp_alias_set == -1)
915 setjmp_alias_set = new_alias_set ();
917 buf_addr = convert_memory_address (Pmode, buf_addr);
919 buf_addr = force_reg (Pmode, buf_addr);
921 /* We require that the user must pass a second argument of 1, because
922 that is what builtin_setjmp will return. */
923 gcc_assert (value == const1_rtx);
925 last = get_last_insn ();
926 #ifdef HAVE_builtin_longjmp
927 if (HAVE_builtin_longjmp)
928 emit_insn (gen_builtin_longjmp (buf_addr));
932 fp = gen_rtx_MEM (Pmode, buf_addr);
933 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
934 GET_MODE_SIZE (Pmode)));
936 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
937 2 * GET_MODE_SIZE (Pmode)));
938 set_mem_alias_set (fp, setjmp_alias_set);
939 set_mem_alias_set (lab, setjmp_alias_set);
940 set_mem_alias_set (stack, setjmp_alias_set);
942 /* Pick up FP, label, and SP from the block and jump. This code is
943 from expand_goto in stmt.c; see there for detailed comments. */
944 #ifdef HAVE_nonlocal_goto
945 if (HAVE_nonlocal_goto)
946 /* We have to pass a value to the nonlocal_goto pattern that will
947 get copied into the static_chain pointer, but it does not matter
948 what that value is, because builtin_setjmp does not use it. */
949 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
953 lab = copy_to_reg (lab);
955 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
956 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
958 emit_move_insn (hard_frame_pointer_rtx, fp);
959 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
961 emit_use (hard_frame_pointer_rtx);
962 emit_use (stack_pointer_rtx);
963 emit_indirect_jump (lab);
967 /* Search backwards and mark the jump insn as a non-local goto.
968 Note that this precludes the use of __builtin_longjmp to a
969 __builtin_setjmp target in the same function. However, we've
970 already cautioned the user that these functions are for
971 internal exception handling use only. */
972 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
974 gcc_assert (insn != last);
978 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
981 else if (CALL_P (insn))
986 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
987 and the address of the save area. */
990 expand_builtin_nonlocal_goto (tree exp)
992 tree t_label, t_save_area;
993 rtx r_label, r_save_area, r_fp, r_sp, insn;
995 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
998 t_label = CALL_EXPR_ARG (exp, 0);
999 t_save_area = CALL_EXPR_ARG (exp, 1);
1001 r_label = expand_normal (t_label);
1002 r_label = convert_memory_address (Pmode, r_label);
1003 r_save_area = expand_normal (t_save_area);
1004 r_save_area = convert_memory_address (Pmode, r_save_area);
1005 /* Copy the address of the save location to a register just in case it was based
1006 on the frame pointer. */
1007 r_save_area = copy_to_reg (r_save_area);
1008 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1009 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1010 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
1012 crtl->has_nonlocal_goto = 1;
1014 #ifdef HAVE_nonlocal_goto
1015 /* ??? We no longer need to pass the static chain value, afaik. */
1016 if (HAVE_nonlocal_goto)
1017 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1021 r_label = copy_to_reg (r_label);
1023 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1024 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1026 /* Restore frame pointer for containing function.
1027 This sets the actual hard register used for the frame pointer
1028 to the location of the function's incoming static chain info.
1029 The non-local goto handler will then adjust it to contain the
1030 proper value and reload the argument pointer, if needed. */
1031 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1032 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
1034 /* USE of hard_frame_pointer_rtx added for consistency;
1035 not clear if really needed. */
1036 emit_use (hard_frame_pointer_rtx);
1037 emit_use (stack_pointer_rtx);
1039 /* If the architecture is using a GP register, we must
1040 conservatively assume that the target function makes use of it.
1041 The prologue of functions with nonlocal gotos must therefore
1042 initialize the GP register to the appropriate value, and we
1043 must then make sure that this value is live at the point
1044 of the jump. (Note that this doesn't necessarily apply
1045 to targets with a nonlocal_goto pattern; they are free
1046 to implement it in their own way. Note also that this is
1047 a no-op if the GP register is a global invariant.) */
1048 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1049 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1050 emit_use (pic_offset_table_rtx);
1052 emit_indirect_jump (r_label);
1055 /* Search backwards to the jump insn and mark it as a
1057 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1061 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1064 else if (CALL_P (insn))
1071 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1072 (not all will be used on all machines) that was passed to __builtin_setjmp.
1073 It updates the stack pointer in that block to correspond to the current
1077 expand_builtin_update_setjmp_buf (rtx buf_addr)
1079 enum machine_mode sa_mode = Pmode;
1083 #ifdef HAVE_save_stack_nonlocal
1084 if (HAVE_save_stack_nonlocal)
1085 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1087 #ifdef STACK_SAVEAREA_MODE
1088 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1092 = gen_rtx_MEM (sa_mode,
1095 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1099 emit_insn (gen_setjmp ());
1102 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1105 /* Expand a call to __builtin_prefetch. For a target that does not support
1106 data prefetch, evaluate the memory address argument in case it has side
1110 expand_builtin_prefetch (tree exp)
1112 tree arg0, arg1, arg2;
1116 if (!validate_arglist (exp, POINTER_TYPE, 0))
1119 arg0 = CALL_EXPR_ARG (exp, 0);
1121 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1122 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1124 nargs = call_expr_nargs (exp);
1126 arg1 = CALL_EXPR_ARG (exp, 1);
1128 arg1 = integer_zero_node;
1130 arg2 = CALL_EXPR_ARG (exp, 2);
1132 arg2 = integer_three_node;
1134 /* Argument 0 is an address. */
1135 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1137 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1138 if (TREE_CODE (arg1) != INTEGER_CST)
1140 error ("second argument to %<__builtin_prefetch%> must be a constant");
1141 arg1 = integer_zero_node;
1143 op1 = expand_normal (arg1);
1144 /* Argument 1 must be either zero or one. */
1145 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1147 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1152 /* Argument 2 (locality) must be a compile-time constant int. */
1153 if (TREE_CODE (arg2) != INTEGER_CST)
1155 error ("third argument to %<__builtin_prefetch%> must be a constant");
1156 arg2 = integer_zero_node;
1158 op2 = expand_normal (arg2);
1159 /* Argument 2 must be 0, 1, 2, or 3. */
1160 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1162 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1166 #ifdef HAVE_prefetch
1169 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1171 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1172 || (GET_MODE (op0) != Pmode))
1174 op0 = convert_memory_address (Pmode, op0);
1175 op0 = force_reg (Pmode, op0);
1177 emit_insn (gen_prefetch (op0, op1, op2));
1181 /* Don't do anything with direct references to volatile memory, but
1182 generate code to handle other side effects. */
1183 if (!MEM_P (op0) && side_effects_p (op0))
1187 /* Get a MEM rtx for expression EXP which is the address of an operand
1188 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1189 the maximum length of the block of memory that might be accessed or
1193 get_memory_rtx (tree exp, tree len)
1195 tree orig_exp = exp;
1199 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1200 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1201 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1202 exp = TREE_OPERAND (exp, 0);
1204 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1205 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1207 /* Get an expression we can use to find the attributes to assign to MEM.
1208 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1209 we can. First remove any nops. */
1210 while (CONVERT_EXPR_P (exp)
1211 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1212 exp = TREE_OPERAND (exp, 0);
1215 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1216 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1217 && host_integerp (TREE_OPERAND (exp, 1), 0)
1218 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1219 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1220 else if (TREE_CODE (exp) == ADDR_EXPR)
1221 exp = TREE_OPERAND (exp, 0);
1222 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1223 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1227 /* Honor attributes derived from exp, except for the alias set
1228 (as builtin stringops may alias with anything) and the size
1229 (as stringops may access multiple array elements). */
1232 set_mem_attributes (mem, exp, 0);
1235 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1237 /* Allow the string and memory builtins to overflow from one
1238 field into another, see http://gcc.gnu.org/PR23561.
1239 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1240 memory accessed by the string or memory builtin will fit
1241 within the field. */
1242 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1244 tree mem_expr = MEM_EXPR (mem);
1245 HOST_WIDE_INT offset = -1, length = -1;
1248 while (TREE_CODE (inner) == ARRAY_REF
1249 || CONVERT_EXPR_P (inner)
1250 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1251 || TREE_CODE (inner) == SAVE_EXPR)
1252 inner = TREE_OPERAND (inner, 0);
1254 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1256 if (MEM_OFFSET (mem)
1257 && CONST_INT_P (MEM_OFFSET (mem)))
1258 offset = INTVAL (MEM_OFFSET (mem));
1260 if (offset >= 0 && len && host_integerp (len, 0))
1261 length = tree_low_cst (len, 0);
1263 while (TREE_CODE (inner) == COMPONENT_REF)
1265 tree field = TREE_OPERAND (inner, 1);
1266 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1267 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1269 /* Bitfields are generally not byte-addressable. */
1270 gcc_assert (!DECL_BIT_FIELD (field)
1271 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1272 % BITS_PER_UNIT) == 0
1273 && host_integerp (DECL_SIZE (field), 0)
1274 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1275 % BITS_PER_UNIT) == 0));
1277 /* If we can prove that the memory starting at XEXP (mem, 0) and
1278 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1279 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1280 fields without DECL_SIZE_UNIT like flexible array members. */
1282 && DECL_SIZE_UNIT (field)
1283 && host_integerp (DECL_SIZE_UNIT (field), 0))
1286 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1289 && offset + length <= size)
1294 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1295 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1296 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1304 mem_expr = TREE_OPERAND (mem_expr, 0);
1305 inner = TREE_OPERAND (inner, 0);
1308 if (mem_expr == NULL)
1310 if (mem_expr != MEM_EXPR (mem))
1312 set_mem_expr (mem, mem_expr);
1313 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1316 set_mem_alias_set (mem, 0);
1317 set_mem_size (mem, NULL_RTX);
1323 /* Built-in functions to perform an untyped call and return. */
1325 #define apply_args_mode \
1326 (this_target_builtins->x_apply_args_mode)
1327 #define apply_result_mode \
1328 (this_target_builtins->x_apply_result_mode)
1330 /* Return the size required for the block returned by __builtin_apply_args,
1331 and initialize apply_args_mode. */
1334 apply_args_size (void)
1336 static int size = -1;
1339 enum machine_mode mode;
1341 /* The values computed by this function never change. */
1344 /* The first value is the incoming arg-pointer. */
1345 size = GET_MODE_SIZE (Pmode);
1347 /* The second value is the structure value address unless this is
1348 passed as an "invisible" first argument. */
1349 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1350 size += GET_MODE_SIZE (Pmode);
1352 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1353 if (FUNCTION_ARG_REGNO_P (regno))
1355 mode = reg_raw_mode[regno];
1357 gcc_assert (mode != VOIDmode);
1359 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1360 if (size % align != 0)
1361 size = CEIL (size, align) * align;
1362 size += GET_MODE_SIZE (mode);
1363 apply_args_mode[regno] = mode;
1367 apply_args_mode[regno] = VOIDmode;
1373 /* Return the size required for the block returned by __builtin_apply,
1374 and initialize apply_result_mode. */
1377 apply_result_size (void)
1379 static int size = -1;
1381 enum machine_mode mode;
1383 /* The values computed by this function never change. */
1388 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1389 if (targetm.calls.function_value_regno_p (regno))
1391 mode = reg_raw_mode[regno];
1393 gcc_assert (mode != VOIDmode);
1395 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1396 if (size % align != 0)
1397 size = CEIL (size, align) * align;
1398 size += GET_MODE_SIZE (mode);
1399 apply_result_mode[regno] = mode;
1402 apply_result_mode[regno] = VOIDmode;
1404 /* Allow targets that use untyped_call and untyped_return to override
1405 the size so that machine-specific information can be stored here. */
1406 #ifdef APPLY_RESULT_SIZE
1407 size = APPLY_RESULT_SIZE;
1413 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1414 /* Create a vector describing the result block RESULT. If SAVEP is true,
1415 the result block is used to save the values; otherwise it is used to
1416 restore the values. */
1419 result_vector (int savep, rtx result)
1421 int regno, size, align, nelts;
1422 enum machine_mode mode;
1424 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1427 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1428 if ((mode = apply_result_mode[regno]) != VOIDmode)
1430 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1431 if (size % align != 0)
1432 size = CEIL (size, align) * align;
1433 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1434 mem = adjust_address (result, mode, size);
1435 savevec[nelts++] = (savep
1436 ? gen_rtx_SET (VOIDmode, mem, reg)
1437 : gen_rtx_SET (VOIDmode, reg, mem));
1438 size += GET_MODE_SIZE (mode);
1440 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1442 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1444 /* Save the state required to perform an untyped call with the same
1445 arguments as were passed to the current function. */
1448 expand_builtin_apply_args_1 (void)
1451 int size, align, regno;
1452 enum machine_mode mode;
1453 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1455 /* Create a block where the arg-pointer, structure value address,
1456 and argument registers can be saved. */
1457 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1459 /* Walk past the arg-pointer and structure value address. */
1460 size = GET_MODE_SIZE (Pmode);
1461 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1462 size += GET_MODE_SIZE (Pmode);
1464 /* Save each register used in calling a function to the block. */
1465 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1466 if ((mode = apply_args_mode[regno]) != VOIDmode)
1468 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1469 if (size % align != 0)
1470 size = CEIL (size, align) * align;
1472 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1474 emit_move_insn (adjust_address (registers, mode, size), tem);
1475 size += GET_MODE_SIZE (mode);
1478 /* Save the arg pointer to the block. */
1479 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1480 #ifdef STACK_GROWS_DOWNWARD
1481 /* We need the pointer as the caller actually passed them to us, not
1482 as we might have pretended they were passed. Make sure it's a valid
1483 operand, as emit_move_insn isn't expected to handle a PLUS. */
1485 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1488 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1490 size = GET_MODE_SIZE (Pmode);
1492 /* Save the structure value address unless this is passed as an
1493 "invisible" first argument. */
1494 if (struct_incoming_value)
1496 emit_move_insn (adjust_address (registers, Pmode, size),
1497 copy_to_reg (struct_incoming_value));
1498 size += GET_MODE_SIZE (Pmode);
1501 /* Return the address of the block. */
1502 return copy_addr_to_reg (XEXP (registers, 0));
1505 /* __builtin_apply_args returns block of memory allocated on
1506 the stack into which is stored the arg pointer, structure
1507 value address, static chain, and all the registers that might
1508 possibly be used in performing a function call. The code is
1509 moved to the start of the function so the incoming values are
1513 expand_builtin_apply_args (void)
1515 /* Don't do __builtin_apply_args more than once in a function.
1516 Save the result of the first call and reuse it. */
1517 if (apply_args_value != 0)
1518 return apply_args_value;
1520 /* When this function is called, it means that registers must be
1521 saved on entry to this function. So we migrate the
1522 call to the first insn of this function. */
1527 temp = expand_builtin_apply_args_1 ();
1531 apply_args_value = temp;
1533 /* Put the insns after the NOTE that starts the function.
1534 If this is inside a start_sequence, make the outer-level insn
1535 chain current, so the code is placed at the start of the
1536 function. If internal_arg_pointer is a non-virtual pseudo,
1537 it needs to be placed after the function that initializes
1539 push_topmost_sequence ();
1540 if (REG_P (crtl->args.internal_arg_pointer)
1541 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1542 emit_insn_before (seq, parm_birth_insn);
1544 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1545 pop_topmost_sequence ();
1550 /* Perform an untyped call and save the state required to perform an
1551 untyped return of whatever value was returned by the given function. */
1554 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1556 int size, align, regno;
1557 enum machine_mode mode;
1558 rtx incoming_args, result, reg, dest, src, call_insn;
1559 rtx old_stack_level = 0;
1560 rtx call_fusage = 0;
1561 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1563 arguments = convert_memory_address (Pmode, arguments);
1565 /* Create a block where the return registers can be saved. */
1566 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1568 /* Fetch the arg pointer from the ARGUMENTS block. */
1569 incoming_args = gen_reg_rtx (Pmode);
1570 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1571 #ifndef STACK_GROWS_DOWNWARD
1572 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1573 incoming_args, 0, OPTAB_LIB_WIDEN);
1576 /* Push a new argument block and copy the arguments. Do not allow
1577 the (potential) memcpy call below to interfere with our stack
1579 do_pending_stack_adjust ();
1582 /* Save the stack with nonlocal if available. */
1583 #ifdef HAVE_save_stack_nonlocal
1584 if (HAVE_save_stack_nonlocal)
1585 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1588 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1590 /* Allocate a block of memory onto the stack and copy the memory
1591 arguments to the outgoing arguments address. We can pass TRUE
1592 as the 4th argument because we just saved the stack pointer
1593 and will restore it right after the call. */
1594 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT, TRUE);
1596 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1597 may have already set current_function_calls_alloca to true.
1598 current_function_calls_alloca won't be set if argsize is zero,
1599 so we have to guarantee need_drap is true here. */
1600 if (SUPPORTS_STACK_ALIGNMENT)
1601 crtl->need_drap = true;
1603 dest = virtual_outgoing_args_rtx;
1604 #ifndef STACK_GROWS_DOWNWARD
1605 if (CONST_INT_P (argsize))
1606 dest = plus_constant (dest, -INTVAL (argsize));
1608 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1610 dest = gen_rtx_MEM (BLKmode, dest);
1611 set_mem_align (dest, PARM_BOUNDARY);
1612 src = gen_rtx_MEM (BLKmode, incoming_args);
1613 set_mem_align (src, PARM_BOUNDARY);
1614 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1616 /* Refer to the argument block. */
1618 arguments = gen_rtx_MEM (BLKmode, arguments);
1619 set_mem_align (arguments, PARM_BOUNDARY);
1621 /* Walk past the arg-pointer and structure value address. */
1622 size = GET_MODE_SIZE (Pmode);
1624 size += GET_MODE_SIZE (Pmode);
1626 /* Restore each of the registers previously saved. Make USE insns
1627 for each of these registers for use in making the call. */
1628 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1629 if ((mode = apply_args_mode[regno]) != VOIDmode)
1631 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1632 if (size % align != 0)
1633 size = CEIL (size, align) * align;
1634 reg = gen_rtx_REG (mode, regno);
1635 emit_move_insn (reg, adjust_address (arguments, mode, size));
1636 use_reg (&call_fusage, reg);
1637 size += GET_MODE_SIZE (mode);
1640 /* Restore the structure value address unless this is passed as an
1641 "invisible" first argument. */
1642 size = GET_MODE_SIZE (Pmode);
1645 rtx value = gen_reg_rtx (Pmode);
1646 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1647 emit_move_insn (struct_value, value);
1648 if (REG_P (struct_value))
1649 use_reg (&call_fusage, struct_value);
1650 size += GET_MODE_SIZE (Pmode);
1653 /* All arguments and registers used for the call are set up by now! */
1654 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1656 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1657 and we don't want to load it into a register as an optimization,
1658 because prepare_call_address already did it if it should be done. */
1659 if (GET_CODE (function) != SYMBOL_REF)
1660 function = memory_address (FUNCTION_MODE, function);
1662 /* Generate the actual call instruction and save the return value. */
1663 #ifdef HAVE_untyped_call
1664 if (HAVE_untyped_call)
1665 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1666 result, result_vector (1, result)));
1669 #ifdef HAVE_call_value
1670 if (HAVE_call_value)
1674 /* Locate the unique return register. It is not possible to
1675 express a call that sets more than one return register using
1676 call_value; use untyped_call for that. In fact, untyped_call
1677 only needs to save the return registers in the given block. */
1678 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1679 if ((mode = apply_result_mode[regno]) != VOIDmode)
1681 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1683 valreg = gen_rtx_REG (mode, regno);
1686 emit_call_insn (GEN_CALL_VALUE (valreg,
1687 gen_rtx_MEM (FUNCTION_MODE, function),
1688 const0_rtx, NULL_RTX, const0_rtx));
1690 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1696 /* Find the CALL insn we just emitted, and attach the register usage
1698 call_insn = last_call_insn ();
1699 add_function_usage_to (call_insn, call_fusage);
1701 /* Restore the stack. */
1702 #ifdef HAVE_save_stack_nonlocal
1703 if (HAVE_save_stack_nonlocal)
1704 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1707 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1711 /* Return the address of the result block. */
1712 result = copy_addr_to_reg (XEXP (result, 0));
1713 return convert_memory_address (ptr_mode, result);
1716 /* Perform an untyped return. */
1719 expand_builtin_return (rtx result)
1721 int size, align, regno;
1722 enum machine_mode mode;
1724 rtx call_fusage = 0;
1726 result = convert_memory_address (Pmode, result);
1728 apply_result_size ();
1729 result = gen_rtx_MEM (BLKmode, result);
1731 #ifdef HAVE_untyped_return
1732 if (HAVE_untyped_return)
1734 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1740 /* Restore the return value and note that each value is used. */
1742 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1743 if ((mode = apply_result_mode[regno]) != VOIDmode)
1745 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1746 if (size % align != 0)
1747 size = CEIL (size, align) * align;
1748 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1749 emit_move_insn (reg, adjust_address (result, mode, size));
1751 push_to_sequence (call_fusage);
1753 call_fusage = get_insns ();
1755 size += GET_MODE_SIZE (mode);
1758 /* Put the USE insns before the return. */
1759 emit_insn (call_fusage);
1761 /* Return whatever values was restored by jumping directly to the end
1763 expand_naked_return ();
1766 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1768 static enum type_class
1769 type_to_class (tree type)
1771 switch (TREE_CODE (type))
1773 case VOID_TYPE: return void_type_class;
1774 case INTEGER_TYPE: return integer_type_class;
1775 case ENUMERAL_TYPE: return enumeral_type_class;
1776 case BOOLEAN_TYPE: return boolean_type_class;
1777 case POINTER_TYPE: return pointer_type_class;
1778 case REFERENCE_TYPE: return reference_type_class;
1779 case OFFSET_TYPE: return offset_type_class;
1780 case REAL_TYPE: return real_type_class;
1781 case COMPLEX_TYPE: return complex_type_class;
1782 case FUNCTION_TYPE: return function_type_class;
1783 case METHOD_TYPE: return method_type_class;
1784 case RECORD_TYPE: return record_type_class;
1786 case QUAL_UNION_TYPE: return union_type_class;
1787 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1788 ? string_type_class : array_type_class);
1789 case LANG_TYPE: return lang_type_class;
1790 default: return no_type_class;
1794 /* Expand a call EXP to __builtin_classify_type. */
1797 expand_builtin_classify_type (tree exp)
1799 if (call_expr_nargs (exp))
1800 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1801 return GEN_INT (no_type_class);
1804 /* This helper macro, meant to be used in mathfn_built_in below,
1805 determines which among a set of three builtin math functions is
1806 appropriate for a given type mode. The `F' and `L' cases are
1807 automatically generated from the `double' case. */
1808 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1809 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1810 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1811 fcodel = BUILT_IN_MATHFN##L ; break;
1812 /* Similar to above, but appends _R after any F/L suffix. */
1813 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1814 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1815 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1816 fcodel = BUILT_IN_MATHFN##L_R ; break;
1818 /* Return mathematic function equivalent to FN but operating directly
1819 on TYPE, if available. If IMPLICIT is true find the function in
1820 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1821 can't do the conversion, return zero. */
1824 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1826 tree const *const fn_arr
1827 = implicit ? implicit_built_in_decls : built_in_decls;
1828 enum built_in_function fcode, fcodef, fcodel;
1832 CASE_MATHFN (BUILT_IN_ACOS)
1833 CASE_MATHFN (BUILT_IN_ACOSH)
1834 CASE_MATHFN (BUILT_IN_ASIN)
1835 CASE_MATHFN (BUILT_IN_ASINH)
1836 CASE_MATHFN (BUILT_IN_ATAN)
1837 CASE_MATHFN (BUILT_IN_ATAN2)
1838 CASE_MATHFN (BUILT_IN_ATANH)
1839 CASE_MATHFN (BUILT_IN_CBRT)
1840 CASE_MATHFN (BUILT_IN_CEIL)
1841 CASE_MATHFN (BUILT_IN_CEXPI)
1842 CASE_MATHFN (BUILT_IN_COPYSIGN)
1843 CASE_MATHFN (BUILT_IN_COS)
1844 CASE_MATHFN (BUILT_IN_COSH)
1845 CASE_MATHFN (BUILT_IN_DREM)
1846 CASE_MATHFN (BUILT_IN_ERF)
1847 CASE_MATHFN (BUILT_IN_ERFC)
1848 CASE_MATHFN (BUILT_IN_EXP)
1849 CASE_MATHFN (BUILT_IN_EXP10)
1850 CASE_MATHFN (BUILT_IN_EXP2)
1851 CASE_MATHFN (BUILT_IN_EXPM1)
1852 CASE_MATHFN (BUILT_IN_FABS)
1853 CASE_MATHFN (BUILT_IN_FDIM)
1854 CASE_MATHFN (BUILT_IN_FLOOR)
1855 CASE_MATHFN (BUILT_IN_FMA)
1856 CASE_MATHFN (BUILT_IN_FMAX)
1857 CASE_MATHFN (BUILT_IN_FMIN)
1858 CASE_MATHFN (BUILT_IN_FMOD)
1859 CASE_MATHFN (BUILT_IN_FREXP)
1860 CASE_MATHFN (BUILT_IN_GAMMA)
1861 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1862 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1863 CASE_MATHFN (BUILT_IN_HYPOT)
1864 CASE_MATHFN (BUILT_IN_ILOGB)
1865 CASE_MATHFN (BUILT_IN_INF)
1866 CASE_MATHFN (BUILT_IN_ISINF)
1867 CASE_MATHFN (BUILT_IN_J0)
1868 CASE_MATHFN (BUILT_IN_J1)
1869 CASE_MATHFN (BUILT_IN_JN)
1870 CASE_MATHFN (BUILT_IN_LCEIL)
1871 CASE_MATHFN (BUILT_IN_LDEXP)
1872 CASE_MATHFN (BUILT_IN_LFLOOR)
1873 CASE_MATHFN (BUILT_IN_LGAMMA)
1874 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1875 CASE_MATHFN (BUILT_IN_LLCEIL)
1876 CASE_MATHFN (BUILT_IN_LLFLOOR)
1877 CASE_MATHFN (BUILT_IN_LLRINT)
1878 CASE_MATHFN (BUILT_IN_LLROUND)
1879 CASE_MATHFN (BUILT_IN_LOG)
1880 CASE_MATHFN (BUILT_IN_LOG10)
1881 CASE_MATHFN (BUILT_IN_LOG1P)
1882 CASE_MATHFN (BUILT_IN_LOG2)
1883 CASE_MATHFN (BUILT_IN_LOGB)
1884 CASE_MATHFN (BUILT_IN_LRINT)
1885 CASE_MATHFN (BUILT_IN_LROUND)
1886 CASE_MATHFN (BUILT_IN_MODF)
1887 CASE_MATHFN (BUILT_IN_NAN)
1888 CASE_MATHFN (BUILT_IN_NANS)
1889 CASE_MATHFN (BUILT_IN_NEARBYINT)
1890 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1891 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1892 CASE_MATHFN (BUILT_IN_POW)
1893 CASE_MATHFN (BUILT_IN_POWI)
1894 CASE_MATHFN (BUILT_IN_POW10)
1895 CASE_MATHFN (BUILT_IN_REMAINDER)
1896 CASE_MATHFN (BUILT_IN_REMQUO)
1897 CASE_MATHFN (BUILT_IN_RINT)
1898 CASE_MATHFN (BUILT_IN_ROUND)
1899 CASE_MATHFN (BUILT_IN_SCALB)
1900 CASE_MATHFN (BUILT_IN_SCALBLN)
1901 CASE_MATHFN (BUILT_IN_SCALBN)
1902 CASE_MATHFN (BUILT_IN_SIGNBIT)
1903 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1904 CASE_MATHFN (BUILT_IN_SIN)
1905 CASE_MATHFN (BUILT_IN_SINCOS)
1906 CASE_MATHFN (BUILT_IN_SINH)
1907 CASE_MATHFN (BUILT_IN_SQRT)
1908 CASE_MATHFN (BUILT_IN_TAN)
1909 CASE_MATHFN (BUILT_IN_TANH)
1910 CASE_MATHFN (BUILT_IN_TGAMMA)
1911 CASE_MATHFN (BUILT_IN_TRUNC)
1912 CASE_MATHFN (BUILT_IN_Y0)
1913 CASE_MATHFN (BUILT_IN_Y1)
1914 CASE_MATHFN (BUILT_IN_YN)
1920 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1921 return fn_arr[fcode];
1922 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1923 return fn_arr[fcodef];
1924 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1925 return fn_arr[fcodel];
1930 /* Like mathfn_built_in_1(), but always use the implicit array. */
1933 mathfn_built_in (tree type, enum built_in_function fn)
1935 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1938 /* If errno must be maintained, expand the RTL to check if the result,
1939 TARGET, of a built-in function call, EXP, is NaN, and if so set
1943 expand_errno_check (tree exp, rtx target)
1945 rtx lab = gen_label_rtx ();
1947 /* Test the result; if it is NaN, set errno=EDOM because
1948 the argument was not in the domain. */
1949 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1950 NULL_RTX, NULL_RTX, lab,
1951 /* The jump is very likely. */
1952 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1955 /* If this built-in doesn't throw an exception, set errno directly. */
1956 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1958 #ifdef GEN_ERRNO_RTX
1959 rtx errno_rtx = GEN_ERRNO_RTX;
1962 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1964 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1970 /* Make sure the library call isn't expanded as a tail call. */
1971 CALL_EXPR_TAILCALL (exp) = 0;
1973 /* We can't set errno=EDOM directly; let the library call do it.
1974 Pop the arguments right away in case the call gets deleted. */
1976 expand_call (exp, target, 0);
1981 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1982 Return NULL_RTX if a normal call should be emitted rather than expanding
1983 the function in-line. EXP is the expression that is a call to the builtin
1984 function; if convenient, the result should be placed in TARGET.
1985 SUBTARGET may be used as the target for computing one of EXP's operands. */
1988 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1990 optab builtin_optab;
1992 tree fndecl = get_callee_fndecl (exp);
1993 enum machine_mode mode;
1994 bool errno_set = false;
1997 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2000 arg = CALL_EXPR_ARG (exp, 0);
2002 switch (DECL_FUNCTION_CODE (fndecl))
2004 CASE_FLT_FN (BUILT_IN_SQRT):
2005 errno_set = ! tree_expr_nonnegative_p (arg);
2006 builtin_optab = sqrt_optab;
2008 CASE_FLT_FN (BUILT_IN_EXP):
2009 errno_set = true; builtin_optab = exp_optab; break;
2010 CASE_FLT_FN (BUILT_IN_EXP10):
2011 CASE_FLT_FN (BUILT_IN_POW10):
2012 errno_set = true; builtin_optab = exp10_optab; break;
2013 CASE_FLT_FN (BUILT_IN_EXP2):
2014 errno_set = true; builtin_optab = exp2_optab; break;
2015 CASE_FLT_FN (BUILT_IN_EXPM1):
2016 errno_set = true; builtin_optab = expm1_optab; break;
2017 CASE_FLT_FN (BUILT_IN_LOGB):
2018 errno_set = true; builtin_optab = logb_optab; break;
2019 CASE_FLT_FN (BUILT_IN_LOG):
2020 errno_set = true; builtin_optab = log_optab; break;
2021 CASE_FLT_FN (BUILT_IN_LOG10):
2022 errno_set = true; builtin_optab = log10_optab; break;
2023 CASE_FLT_FN (BUILT_IN_LOG2):
2024 errno_set = true; builtin_optab = log2_optab; break;
2025 CASE_FLT_FN (BUILT_IN_LOG1P):
2026 errno_set = true; builtin_optab = log1p_optab; break;
2027 CASE_FLT_FN (BUILT_IN_ASIN):
2028 builtin_optab = asin_optab; break;
2029 CASE_FLT_FN (BUILT_IN_ACOS):
2030 builtin_optab = acos_optab; break;
2031 CASE_FLT_FN (BUILT_IN_TAN):
2032 builtin_optab = tan_optab; break;
2033 CASE_FLT_FN (BUILT_IN_ATAN):
2034 builtin_optab = atan_optab; break;
2035 CASE_FLT_FN (BUILT_IN_FLOOR):
2036 builtin_optab = floor_optab; break;
2037 CASE_FLT_FN (BUILT_IN_CEIL):
2038 builtin_optab = ceil_optab; break;
2039 CASE_FLT_FN (BUILT_IN_TRUNC):
2040 builtin_optab = btrunc_optab; break;
2041 CASE_FLT_FN (BUILT_IN_ROUND):
2042 builtin_optab = round_optab; break;
2043 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2044 builtin_optab = nearbyint_optab;
2045 if (flag_trapping_math)
2047 /* Else fallthrough and expand as rint. */
2048 CASE_FLT_FN (BUILT_IN_RINT):
2049 builtin_optab = rint_optab; break;
2050 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2051 builtin_optab = significand_optab; break;
2056 /* Make a suitable register to place result in. */
2057 mode = TYPE_MODE (TREE_TYPE (exp));
2059 if (! flag_errno_math || ! HONOR_NANS (mode))
2062 /* Before working hard, check whether the instruction is available. */
2063 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2065 target = gen_reg_rtx (mode);
2067 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2068 need to expand the argument again. This way, we will not perform
2069 side-effects more the once. */
2070 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2072 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2076 /* Compute into TARGET.
2077 Set TARGET to wherever the result comes back. */
2078 target = expand_unop (mode, builtin_optab, op0, target, 0);
2083 expand_errno_check (exp, target);
2085 /* Output the entire sequence. */
2086 insns = get_insns ();
2092 /* If we were unable to expand via the builtin, stop the sequence
2093 (without outputting the insns) and call to the library function
2094 with the stabilized argument list. */
2098 return expand_call (exp, target, target == const0_rtx);
2101 /* Expand a call to the builtin binary math functions (pow and atan2).
2102 Return NULL_RTX if a normal call should be emitted rather than expanding the
2103 function in-line. EXP is the expression that is a call to the builtin
2104 function; if convenient, the result should be placed in TARGET.
2105 SUBTARGET may be used as the target for computing one of EXP's
2109 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2111 optab builtin_optab;
2112 rtx op0, op1, insns;
2113 int op1_type = REAL_TYPE;
2114 tree fndecl = get_callee_fndecl (exp);
2116 enum machine_mode mode;
2117 bool errno_set = true;
2119 switch (DECL_FUNCTION_CODE (fndecl))
2121 CASE_FLT_FN (BUILT_IN_SCALBN):
2122 CASE_FLT_FN (BUILT_IN_SCALBLN):
2123 CASE_FLT_FN (BUILT_IN_LDEXP):
2124 op1_type = INTEGER_TYPE;
2129 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2132 arg0 = CALL_EXPR_ARG (exp, 0);
2133 arg1 = CALL_EXPR_ARG (exp, 1);
2135 switch (DECL_FUNCTION_CODE (fndecl))
2137 CASE_FLT_FN (BUILT_IN_POW):
2138 builtin_optab = pow_optab; break;
2139 CASE_FLT_FN (BUILT_IN_ATAN2):
2140 builtin_optab = atan2_optab; break;
2141 CASE_FLT_FN (BUILT_IN_SCALB):
2142 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2144 builtin_optab = scalb_optab; break;
2145 CASE_FLT_FN (BUILT_IN_SCALBN):
2146 CASE_FLT_FN (BUILT_IN_SCALBLN):
2147 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2149 /* Fall through... */
2150 CASE_FLT_FN (BUILT_IN_LDEXP):
2151 builtin_optab = ldexp_optab; break;
2152 CASE_FLT_FN (BUILT_IN_FMOD):
2153 builtin_optab = fmod_optab; break;
2154 CASE_FLT_FN (BUILT_IN_REMAINDER):
2155 CASE_FLT_FN (BUILT_IN_DREM):
2156 builtin_optab = remainder_optab; break;
2161 /* Make a suitable register to place result in. */
2162 mode = TYPE_MODE (TREE_TYPE (exp));
2164 /* Before working hard, check whether the instruction is available. */
2165 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2168 target = gen_reg_rtx (mode);
2170 if (! flag_errno_math || ! HONOR_NANS (mode))
2173 /* Always stabilize the argument list. */
2174 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2175 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2177 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2178 op1 = expand_normal (arg1);
2182 /* Compute into TARGET.
2183 Set TARGET to wherever the result comes back. */
2184 target = expand_binop (mode, builtin_optab, op0, op1,
2185 target, 0, OPTAB_DIRECT);
2187 /* If we were unable to expand via the builtin, stop the sequence
2188 (without outputting the insns) and call to the library function
2189 with the stabilized argument list. */
2193 return expand_call (exp, target, target == const0_rtx);
2197 expand_errno_check (exp, target);
2199 /* Output the entire sequence. */
2200 insns = get_insns ();
2207 /* Expand a call to the builtin sin and cos math functions.
2208 Return NULL_RTX if a normal call should be emitted rather than expanding the
2209 function in-line. EXP is the expression that is a call to the builtin
2210 function; if convenient, the result should be placed in TARGET.
2211 SUBTARGET may be used as the target for computing one of EXP's
2215 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2217 optab builtin_optab;
2219 tree fndecl = get_callee_fndecl (exp);
2220 enum machine_mode mode;
2223 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2226 arg = CALL_EXPR_ARG (exp, 0);
2228 switch (DECL_FUNCTION_CODE (fndecl))
2230 CASE_FLT_FN (BUILT_IN_SIN):
2231 CASE_FLT_FN (BUILT_IN_COS):
2232 builtin_optab = sincos_optab; break;
2237 /* Make a suitable register to place result in. */
2238 mode = TYPE_MODE (TREE_TYPE (exp));
2240 /* Check if sincos insn is available, otherwise fallback
2241 to sin or cos insn. */
2242 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2243 switch (DECL_FUNCTION_CODE (fndecl))
2245 CASE_FLT_FN (BUILT_IN_SIN):
2246 builtin_optab = sin_optab; break;
2247 CASE_FLT_FN (BUILT_IN_COS):
2248 builtin_optab = cos_optab; break;
2253 /* Before working hard, check whether the instruction is available. */
2254 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2256 target = gen_reg_rtx (mode);
2258 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2259 need to expand the argument again. This way, we will not perform
2260 side-effects more the once. */
2261 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2263 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2267 /* Compute into TARGET.
2268 Set TARGET to wherever the result comes back. */
2269 if (builtin_optab == sincos_optab)
2273 switch (DECL_FUNCTION_CODE (fndecl))
2275 CASE_FLT_FN (BUILT_IN_SIN):
2276 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2278 CASE_FLT_FN (BUILT_IN_COS):
2279 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2284 gcc_assert (result);
2288 target = expand_unop (mode, builtin_optab, op0, target, 0);
2293 /* Output the entire sequence. */
2294 insns = get_insns ();
2300 /* If we were unable to expand via the builtin, stop the sequence
2301 (without outputting the insns) and call to the library function
2302 with the stabilized argument list. */
2306 target = expand_call (exp, target, target == const0_rtx);
2311 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2312 return an RTL instruction code that implements the functionality.
2313 If that isn't possible or available return CODE_FOR_nothing. */
2315 static enum insn_code
2316 interclass_mathfn_icode (tree arg, tree fndecl)
2318 bool errno_set = false;
2319 optab builtin_optab = 0;
2320 enum machine_mode mode;
2322 switch (DECL_FUNCTION_CODE (fndecl))
2324 CASE_FLT_FN (BUILT_IN_ILOGB):
2325 errno_set = true; builtin_optab = ilogb_optab; break;
2326 CASE_FLT_FN (BUILT_IN_ISINF):
2327 builtin_optab = isinf_optab; break;
2328 case BUILT_IN_ISNORMAL:
2329 case BUILT_IN_ISFINITE:
2330 CASE_FLT_FN (BUILT_IN_FINITE):
2331 case BUILT_IN_FINITED32:
2332 case BUILT_IN_FINITED64:
2333 case BUILT_IN_FINITED128:
2334 case BUILT_IN_ISINFD32:
2335 case BUILT_IN_ISINFD64:
2336 case BUILT_IN_ISINFD128:
2337 /* These builtins have no optabs (yet). */
2343 /* There's no easy way to detect the case we need to set EDOM. */
2344 if (flag_errno_math && errno_set)
2345 return CODE_FOR_nothing;
2347 /* Optab mode depends on the mode of the input argument. */
2348 mode = TYPE_MODE (TREE_TYPE (arg));
2351 return optab_handler (builtin_optab, mode);
2352 return CODE_FOR_nothing;
2355 /* Expand a call to one of the builtin math functions that operate on
2356 floating point argument and output an integer result (ilogb, isinf,
2358 Return 0 if a normal call should be emitted rather than expanding the
2359 function in-line. EXP is the expression that is a call to the builtin
2360 function; if convenient, the result should be placed in TARGET.
2361 SUBTARGET may be used as the target for computing one of EXP's operands. */
2364 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2366 enum insn_code icode = CODE_FOR_nothing;
2368 tree fndecl = get_callee_fndecl (exp);
2369 enum machine_mode mode;
2372 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2375 arg = CALL_EXPR_ARG (exp, 0);
2376 icode = interclass_mathfn_icode (arg, fndecl);
2377 mode = TYPE_MODE (TREE_TYPE (arg));
2379 if (icode != CODE_FOR_nothing)
2381 rtx last = get_last_insn ();
2382 tree orig_arg = arg;
2383 /* Make a suitable register to place result in. */
2385 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
2386 || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
2387 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2389 gcc_assert (insn_data[icode].operand[0].predicate
2390 (target, GET_MODE (target)));
2392 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2393 need to expand the argument again. This way, we will not perform
2394 side-effects more the once. */
2395 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2397 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2399 if (mode != GET_MODE (op0))
2400 op0 = convert_to_mode (mode, op0, 0);
2402 /* Compute into TARGET.
2403 Set TARGET to wherever the result comes back. */
2404 if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
2406 delete_insns_since (last);
2407 CALL_EXPR_ARG (exp, 0) = orig_arg;
2413 /* Expand a call to the builtin sincos math function.
2414 Return NULL_RTX if a normal call should be emitted rather than expanding the
2415 function in-line. EXP is the expression that is a call to the builtin
2419 expand_builtin_sincos (tree exp)
2421 rtx op0, op1, op2, target1, target2;
2422 enum machine_mode mode;
2423 tree arg, sinp, cosp;
2425 location_t loc = EXPR_LOCATION (exp);
2427 if (!validate_arglist (exp, REAL_TYPE,
2428 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2431 arg = CALL_EXPR_ARG (exp, 0);
2432 sinp = CALL_EXPR_ARG (exp, 1);
2433 cosp = CALL_EXPR_ARG (exp, 2);
2435 /* Make a suitable register to place result in. */
2436 mode = TYPE_MODE (TREE_TYPE (arg));
2438 /* Check if sincos insn is available, otherwise emit the call. */
2439 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2442 target1 = gen_reg_rtx (mode);
2443 target2 = gen_reg_rtx (mode);
2445 op0 = expand_normal (arg);
2446 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2447 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2449 /* Compute into target1 and target2.
2450 Set TARGET to wherever the result comes back. */
2451 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2452 gcc_assert (result);
2454 /* Move target1 and target2 to the memory locations indicated
2456 emit_move_insn (op1, target1);
2457 emit_move_insn (op2, target2);
2462 /* Expand a call to the internal cexpi builtin to the sincos math function.
2463 EXP is the expression that is a call to the builtin function; if convenient,
2464 the result should be placed in TARGET. SUBTARGET may be used as the target
2465 for computing one of EXP's operands. */
2468 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2470 tree fndecl = get_callee_fndecl (exp);
2472 enum machine_mode mode;
2474 location_t loc = EXPR_LOCATION (exp);
2476 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2479 arg = CALL_EXPR_ARG (exp, 0);
2480 type = TREE_TYPE (arg);
2481 mode = TYPE_MODE (TREE_TYPE (arg));
2483 /* Try expanding via a sincos optab, fall back to emitting a libcall
2484 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2485 is only generated from sincos, cexp or if we have either of them. */
2486 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2488 op1 = gen_reg_rtx (mode);
2489 op2 = gen_reg_rtx (mode);
2491 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2493 /* Compute into op1 and op2. */
2494 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2496 else if (TARGET_HAS_SINCOS)
2498 tree call, fn = NULL_TREE;
2502 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2503 fn = built_in_decls[BUILT_IN_SINCOSF];
2504 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2505 fn = built_in_decls[BUILT_IN_SINCOS];
2506 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2507 fn = built_in_decls[BUILT_IN_SINCOSL];
2511 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2512 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2513 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2514 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2515 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2516 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2518 /* Make sure not to fold the sincos call again. */
2519 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2520 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2521 call, 3, arg, top1, top2));
2525 tree call, fn = NULL_TREE, narg;
2526 tree ctype = build_complex_type (type);
2528 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2529 fn = built_in_decls[BUILT_IN_CEXPF];
2530 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2531 fn = built_in_decls[BUILT_IN_CEXP];
2532 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2533 fn = built_in_decls[BUILT_IN_CEXPL];
2537 /* If we don't have a decl for cexp create one. This is the
2538 friendliest fallback if the user calls __builtin_cexpi
2539 without full target C99 function support. */
2540 if (fn == NULL_TREE)
2543 const char *name = NULL;
2545 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2547 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2549 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2552 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2553 fn = build_fn_decl (name, fntype);
2556 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2557 build_real (type, dconst0), arg);
2559 /* Make sure not to fold the cexp call again. */
2560 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2561 return expand_expr (build_call_nary (ctype, call, 1, narg),
2562 target, VOIDmode, EXPAND_NORMAL);
2565 /* Now build the proper return type. */
2566 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2567 make_tree (TREE_TYPE (arg), op2),
2568 make_tree (TREE_TYPE (arg), op1)),
2569 target, VOIDmode, EXPAND_NORMAL);
2572 /* Conveniently construct a function call expression. FNDECL names the
2573 function to be called, N is the number of arguments, and the "..."
2574 parameters are the argument expressions. Unlike build_call_exr
2575 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2578 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2581 tree fntype = TREE_TYPE (fndecl);
2582 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2585 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2587 SET_EXPR_LOCATION (fn, loc);
2591 /* Expand a call to one of the builtin rounding functions gcc defines
2592 as an extension (lfloor and lceil). As these are gcc extensions we
2593 do not need to worry about setting errno to EDOM.
2594 If expanding via optab fails, lower expression to (int)(floor(x)).
2595 EXP is the expression that is a call to the builtin function;
2596 if convenient, the result should be placed in TARGET. */
2599 expand_builtin_int_roundingfn (tree exp, rtx target)
2601 convert_optab builtin_optab;
2602 rtx op0, insns, tmp;
2603 tree fndecl = get_callee_fndecl (exp);
2604 enum built_in_function fallback_fn;
2605 tree fallback_fndecl;
2606 enum machine_mode mode;
2609 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2612 arg = CALL_EXPR_ARG (exp, 0);
2614 switch (DECL_FUNCTION_CODE (fndecl))
2616 CASE_FLT_FN (BUILT_IN_LCEIL):
2617 CASE_FLT_FN (BUILT_IN_LLCEIL):
2618 builtin_optab = lceil_optab;
2619 fallback_fn = BUILT_IN_CEIL;
2622 CASE_FLT_FN (BUILT_IN_LFLOOR):
2623 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2624 builtin_optab = lfloor_optab;
2625 fallback_fn = BUILT_IN_FLOOR;
2632 /* Make a suitable register to place result in. */
2633 mode = TYPE_MODE (TREE_TYPE (exp));
2635 target = gen_reg_rtx (mode);
2637 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2638 need to expand the argument again. This way, we will not perform
2639 side-effects more the once. */
2640 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2642 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2646 /* Compute into TARGET. */
2647 if (expand_sfix_optab (target, op0, builtin_optab))
2649 /* Output the entire sequence. */
2650 insns = get_insns ();
2656 /* If we were unable to expand via the builtin, stop the sequence
2657 (without outputting the insns). */
2660 /* Fall back to floating point rounding optab. */
2661 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2663 /* For non-C99 targets we may end up without a fallback fndecl here
2664 if the user called __builtin_lfloor directly. In this case emit
2665 a call to the floor/ceil variants nevertheless. This should result
2666 in the best user experience for not full C99 targets. */
2667 if (fallback_fndecl == NULL_TREE)
2670 const char *name = NULL;
2672 switch (DECL_FUNCTION_CODE (fndecl))
2674 case BUILT_IN_LCEIL:
2675 case BUILT_IN_LLCEIL:
2678 case BUILT_IN_LCEILF:
2679 case BUILT_IN_LLCEILF:
2682 case BUILT_IN_LCEILL:
2683 case BUILT_IN_LLCEILL:
2686 case BUILT_IN_LFLOOR:
2687 case BUILT_IN_LLFLOOR:
2690 case BUILT_IN_LFLOORF:
2691 case BUILT_IN_LLFLOORF:
2694 case BUILT_IN_LFLOORL:
2695 case BUILT_IN_LLFLOORL:
2702 fntype = build_function_type_list (TREE_TYPE (arg),
2703 TREE_TYPE (arg), NULL_TREE);
2704 fallback_fndecl = build_fn_decl (name, fntype);
2707 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2709 tmp = expand_normal (exp);
2711 /* Truncate the result of floating point optab to integer
2712 via expand_fix (). */
2713 target = gen_reg_rtx (mode);
2714 expand_fix (target, tmp, 0);
2719 /* Expand a call to one of the builtin math functions doing integer
2721 Return 0 if a normal call should be emitted rather than expanding the
2722 function in-line. EXP is the expression that is a call to the builtin
2723 function; if convenient, the result should be placed in TARGET. */
2726 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2728 convert_optab builtin_optab;
2730 tree fndecl = get_callee_fndecl (exp);
2732 enum machine_mode mode;
2734 /* There's no easy way to detect the case we need to set EDOM. */
2735 if (flag_errno_math)
2738 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2741 arg = CALL_EXPR_ARG (exp, 0);
2743 switch (DECL_FUNCTION_CODE (fndecl))
2745 CASE_FLT_FN (BUILT_IN_LRINT):
2746 CASE_FLT_FN (BUILT_IN_LLRINT):
2747 builtin_optab = lrint_optab; break;
2748 CASE_FLT_FN (BUILT_IN_LROUND):
2749 CASE_FLT_FN (BUILT_IN_LLROUND):
2750 builtin_optab = lround_optab; break;
2755 /* Make a suitable register to place result in. */
2756 mode = TYPE_MODE (TREE_TYPE (exp));
2758 target = gen_reg_rtx (mode);
2760 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2761 need to expand the argument again. This way, we will not perform
2762 side-effects more the once. */
2763 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2765 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2769 if (expand_sfix_optab (target, op0, builtin_optab))
2771 /* Output the entire sequence. */
2772 insns = get_insns ();
2778 /* If we were unable to expand via the builtin, stop the sequence
2779 (without outputting the insns) and call to the library function
2780 with the stabilized argument list. */
2783 target = expand_call (exp, target, target == const0_rtx);
2788 /* To evaluate powi(x,n), the floating point value x raised to the
2789 constant integer exponent n, we use a hybrid algorithm that
2790 combines the "window method" with look-up tables. For an
2791 introduction to exponentiation algorithms and "addition chains",
2792 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2793 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2794 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2795 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2797 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2798 multiplications to inline before calling the system library's pow
2799 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2800 so this default never requires calling pow, powf or powl. */
2802 #ifndef POWI_MAX_MULTS
2803 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2806 /* The size of the "optimal power tree" lookup table. All
2807 exponents less than this value are simply looked up in the
2808 powi_table below. This threshold is also used to size the
2809 cache of pseudo registers that hold intermediate results. */
2810 #define POWI_TABLE_SIZE 256
2812 /* The size, in bits of the window, used in the "window method"
2813 exponentiation algorithm. This is equivalent to a radix of
2814 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2815 #define POWI_WINDOW_SIZE 3
2817 /* The following table is an efficient representation of an
2818 "optimal power tree". For each value, i, the corresponding
2819 value, j, in the table states than an optimal evaluation
2820 sequence for calculating pow(x,i) can be found by evaluating
2821 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2822 100 integers is given in Knuth's "Seminumerical algorithms". */
2824 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2826 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2827 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2828 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2829 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2830 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2831 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2832 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2833 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2834 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2835 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2836 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2837 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2838 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2839 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2840 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2841 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2842 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2843 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2844 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2845 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2846 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2847 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2848 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2849 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2850 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2851 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2852 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2853 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2854 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2855 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2856 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2857 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2861 /* Return the number of multiplications required to calculate
2862 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2863 subroutine of powi_cost. CACHE is an array indicating
2864 which exponents have already been calculated. */
2867 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2869 /* If we've already calculated this exponent, then this evaluation
2870 doesn't require any additional multiplications. */
2875 return powi_lookup_cost (n - powi_table[n], cache)
2876 + powi_lookup_cost (powi_table[n], cache) + 1;
2879 /* Return the number of multiplications required to calculate
2880 powi(x,n) for an arbitrary x, given the exponent N. This
2881 function needs to be kept in sync with expand_powi below. */
2884 powi_cost (HOST_WIDE_INT n)
2886 bool cache[POWI_TABLE_SIZE];
2887 unsigned HOST_WIDE_INT digit;
2888 unsigned HOST_WIDE_INT val;
2894 /* Ignore the reciprocal when calculating the cost. */
2895 val = (n < 0) ? -n : n;
2897 /* Initialize the exponent cache. */
2898 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2903 while (val >= POWI_TABLE_SIZE)
2907 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2908 result += powi_lookup_cost (digit, cache)
2909 + POWI_WINDOW_SIZE + 1;
2910 val >>= POWI_WINDOW_SIZE;
2919 return result + powi_lookup_cost (val, cache);
2922 /* Recursive subroutine of expand_powi. This function takes the array,
2923 CACHE, of already calculated exponents and an exponent N and returns
2924 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2927 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2929 unsigned HOST_WIDE_INT digit;
2933 if (n < POWI_TABLE_SIZE)
2938 target = gen_reg_rtx (mode);
2941 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2942 op1 = expand_powi_1 (mode, powi_table[n], cache);
2946 target = gen_reg_rtx (mode);
2947 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2948 op0 = expand_powi_1 (mode, n - digit, cache);
2949 op1 = expand_powi_1 (mode, digit, cache);
2953 target = gen_reg_rtx (mode);
2954 op0 = expand_powi_1 (mode, n >> 1, cache);
2958 result = expand_mult (mode, op0, op1, target, 0);
2959 if (result != target)
2960 emit_move_insn (target, result);
2964 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2965 floating point operand in mode MODE, and N is the exponent. This
2966 function needs to be kept in sync with powi_cost above. */
2969 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2971 rtx cache[POWI_TABLE_SIZE];
2975 return CONST1_RTX (mode);
2977 memset (cache, 0, sizeof (cache));
2980 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2982 /* If the original exponent was negative, reciprocate the result. */
2984 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2985 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2990 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
2991 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
2992 if we can simplify it. */
2994 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
2997 if (TREE_CODE (arg1) == REAL_CST
2998 && !TREE_OVERFLOW (arg1)
2999 && flag_unsafe_math_optimizations)
3001 enum machine_mode mode = TYPE_MODE (type);
3002 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
3003 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
3004 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
3005 tree op = NULL_TREE;
3009 /* Optimize pow (x, 0.5) into sqrt. */
3010 if (REAL_VALUES_EQUAL (c, dconsthalf))
3011 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3015 REAL_VALUE_TYPE dconst1_4 = dconst1;
3016 REAL_VALUE_TYPE dconst3_4;
3017 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
3019 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
3020 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
3022 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
3023 machines that a builtin sqrt instruction is smaller than a
3024 call to pow with 0.25, so do this optimization even if
3026 if (REAL_VALUES_EQUAL (c, dconst1_4))
3028 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3029 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
3032 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
3033 are optimizing for space. */
3034 else if (optimize_insn_for_speed_p ()
3035 && !TREE_SIDE_EFFECTS (arg0)
3036 && REAL_VALUES_EQUAL (c, dconst3_4))
3038 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
3039 tree sqrt2 = builtin_save_expr (sqrt1);
3040 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
3041 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
3046 /* Check whether we can do cbrt insstead of pow (x, 1./3.) and
3047 cbrt/sqrts instead of pow (x, 1./6.). */
3049 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
3051 /* First try 1/3. */
3052 REAL_VALUE_TYPE dconst1_3
3053 = real_value_truncate (mode, dconst_third ());
3055 if (REAL_VALUES_EQUAL (c, dconst1_3))
3056 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
3059 else if (optimize_insn_for_speed_p ())
3061 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
3062 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
3064 if (REAL_VALUES_EQUAL (c, dconst1_6))
3066 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3067 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3073 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
3079 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
3080 a normal call should be emitted rather than expanding the function
3081 in-line. EXP is the expression that is a call to the builtin
3082 function; if convenient, the result should be placed in TARGET. */
3085 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
3089 tree type = TREE_TYPE (exp);
3090 REAL_VALUE_TYPE cint, c, c2;
3093 enum machine_mode mode = TYPE_MODE (type);
3095 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
3098 arg0 = CALL_EXPR_ARG (exp, 0);
3099 arg1 = CALL_EXPR_ARG (exp, 1);
3101 if (TREE_CODE (arg1) != REAL_CST
3102 || TREE_OVERFLOW (arg1))
3103 return expand_builtin_mathfn_2 (exp, target, subtarget);
3105 /* Handle constant exponents. */
3107 /* For integer valued exponents we can expand to an optimal multiplication
3108 sequence using expand_powi. */
3109 c = TREE_REAL_CST (arg1);
3110 n = real_to_integer (&c);
3111 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3112 if (real_identical (&c, &cint)
3113 && ((n >= -1 && n <= 2)
3114 || (flag_unsafe_math_optimizations
3115 && optimize_insn_for_speed_p ()
3116 && powi_cost (n) <= POWI_MAX_MULTS)))
3118 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3121 op = force_reg (mode, op);
3122 op = expand_powi (op, mode, n);
3127 narg0 = builtin_save_expr (arg0);
3129 /* If the exponent is not integer valued, check if it is half of an integer.
3130 In this case we can expand to sqrt (x) * x**(n/2). */
3131 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3132 if (fn != NULL_TREE)
3134 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3135 n = real_to_integer (&c2);
3136 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3137 if (real_identical (&c2, &cint)
3138 && ((flag_unsafe_math_optimizations
3139 && optimize_insn_for_speed_p ()
3140 && powi_cost (n/2) <= POWI_MAX_MULTS)
3141 /* Even the c == 0.5 case cannot be done unconditionally
3142 when we need to preserve signed zeros, as
3143 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
3144 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
3145 /* For c == 1.5 we can assume that x * sqrt (x) is always
3146 smaller than pow (x, 1.5) if sqrt will not be expanded
3149 && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)))
3151 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3153 /* Use expand_expr in case the newly built call expression
3154 was folded to a non-call. */
3155 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3158 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3159 op2 = force_reg (mode, op2);
3160 op2 = expand_powi (op2, mode, abs (n / 2));
3161 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3162 0, OPTAB_LIB_WIDEN);
3163 /* If the original exponent was negative, reciprocate the
3166 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3167 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3173 /* Check whether we can do a series of sqrt or cbrt's instead of the pow
3175 op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
3180 /* Try if the exponent is a third of an integer. In this case
3181 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3182 different from pow (x, 1./3.) due to rounding and behavior
3183 with negative x we need to constrain this transformation to
3184 unsafe math and positive x or finite math. */
3185 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3187 && flag_unsafe_math_optimizations
3188 && (tree_expr_nonnegative_p (arg0)
3189 || !HONOR_NANS (mode)))
3191 REAL_VALUE_TYPE dconst3;
3192 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3193 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3194 real_round (&c2, mode, &c2);
3195 n = real_to_integer (&c2);
3196 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3197 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3198 real_convert (&c2, mode, &c2);
3199 if (real_identical (&c2, &c)
3200 && ((optimize_insn_for_speed_p ()
3201 && powi_cost (n/3) <= POWI_MAX_MULTS)
3204 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3206 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3207 if (abs (n) % 3 == 2)
3208 op = expand_simple_binop (mode, MULT, op, op, op,
3209 0, OPTAB_LIB_WIDEN);
3212 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3213 op2 = force_reg (mode, op2);
3214 op2 = expand_powi (op2, mode, abs (n / 3));
3215 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3216 0, OPTAB_LIB_WIDEN);
3217 /* If the original exponent was negative, reciprocate the
3220 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3221 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3227 /* Fall back to optab expansion. */
3228 return expand_builtin_mathfn_2 (exp, target, subtarget);
3231 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3232 a normal call should be emitted rather than expanding the function
3233 in-line. EXP is the expression that is a call to the builtin
3234 function; if convenient, the result should be placed in TARGET. */
3237 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3241 enum machine_mode mode;
3242 enum machine_mode mode2;
3244 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3247 arg0 = CALL_EXPR_ARG (exp, 0);
3248 arg1 = CALL_EXPR_ARG (exp, 1);
3249 mode = TYPE_MODE (TREE_TYPE (exp));
3251 /* Handle constant power. */
3253 if (TREE_CODE (arg1) == INTEGER_CST
3254 && !TREE_OVERFLOW (arg1))
3256 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3258 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3259 Otherwise, check the number of multiplications required. */
3260 if ((TREE_INT_CST_HIGH (arg1) == 0
3261 || TREE_INT_CST_HIGH (arg1) == -1)
3262 && ((n >= -1 && n <= 2)
3263 || (optimize_insn_for_speed_p ()
3264 && powi_cost (n) <= POWI_MAX_MULTS)))
3266 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3267 op0 = force_reg (mode, op0);
3268 return expand_powi (op0, mode, n);
3272 /* Emit a libcall to libgcc. */
3274 /* Mode of the 2nd argument must match that of an int. */
3275 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3277 if (target == NULL_RTX)
3278 target = gen_reg_rtx (mode);
3280 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3281 if (GET_MODE (op0) != mode)
3282 op0 = convert_to_mode (mode, op0, 0);
3283 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3284 if (GET_MODE (op1) != mode2)
3285 op1 = convert_to_mode (mode2, op1, 0);
3287 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3288 target, LCT_CONST, mode, 2,
3289 op0, mode, op1, mode2);
3294 /* Expand expression EXP which is a call to the strlen builtin. Return
3295 NULL_RTX if we failed the caller should emit a normal call, otherwise
3296 try to get the result in TARGET, if convenient. */
3299 expand_builtin_strlen (tree exp, rtx target,
3300 enum machine_mode target_mode)
3302 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3308 tree src = CALL_EXPR_ARG (exp, 0);
3309 rtx result, src_reg, char_rtx, before_strlen;
3310 enum machine_mode insn_mode = target_mode, char_mode;
3311 enum insn_code icode = CODE_FOR_nothing;
3314 /* If the length can be computed at compile-time, return it. */
3315 len = c_strlen (src, 0);
3317 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3319 /* If the length can be computed at compile-time and is constant
3320 integer, but there are side-effects in src, evaluate
3321 src for side-effects, then return len.
3322 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3323 can be optimized into: i++; x = 3; */
3324 len = c_strlen (src, 1);
3325 if (len && TREE_CODE (len) == INTEGER_CST)
3327 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3328 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3331 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3333 /* If SRC is not a pointer type, don't do this operation inline. */
3337 /* Bail out if we can't compute strlen in the right mode. */
3338 while (insn_mode != VOIDmode)
3340 icode = optab_handler (strlen_optab, insn_mode);
3341 if (icode != CODE_FOR_nothing)
3344 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3346 if (insn_mode == VOIDmode)
3349 /* Make a place to write the result of the instruction. */
3353 && GET_MODE (result) == insn_mode
3354 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3355 result = gen_reg_rtx (insn_mode);
3357 /* Make a place to hold the source address. We will not expand
3358 the actual source until we are sure that the expansion will
3359 not fail -- there are trees that cannot be expanded twice. */
3360 src_reg = gen_reg_rtx (Pmode);
3362 /* Mark the beginning of the strlen sequence so we can emit the
3363 source operand later. */
3364 before_strlen = get_last_insn ();
3366 char_rtx = const0_rtx;
3367 char_mode = insn_data[(int) icode].operand[2].mode;
3368 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3370 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3372 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3373 char_rtx, GEN_INT (align));
3378 /* Now that we are assured of success, expand the source. */
3380 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3382 emit_move_insn (src_reg, pat);
3387 emit_insn_after (pat, before_strlen);
3389 emit_insn_before (pat, get_insns ());
3391 /* Return the value in the proper mode for this function. */
3392 if (GET_MODE (result) == target_mode)
3394 else if (target != 0)
3395 convert_move (target, result, 0);
3397 target = convert_to_mode (target_mode, result, 0);
3403 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3404 bytes from constant string DATA + OFFSET and return it as target
3408 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3409 enum machine_mode mode)
3411 const char *str = (const char *) data;
3413 gcc_assert (offset >= 0
3414 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3415 <= strlen (str) + 1));
3417 return c_readstr (str + offset, mode);
3420 /* Expand a call EXP to the memcpy builtin.
3421 Return NULL_RTX if we failed, the caller should emit a normal call,
3422 otherwise try to get the result in TARGET, if convenient (and in
3423 mode MODE if that's convenient). */
3426 expand_builtin_memcpy (tree exp, rtx target)
3428 if (!validate_arglist (exp,
3429 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3433 tree dest = CALL_EXPR_ARG (exp, 0);
3434 tree src = CALL_EXPR_ARG (exp, 1);
3435 tree len = CALL_EXPR_ARG (exp, 2);
3436 const char *src_str;
3437 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3438 unsigned int dest_align
3439 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3440 rtx dest_mem, src_mem, dest_addr, len_rtx;
3441 HOST_WIDE_INT expected_size = -1;
3442 unsigned int expected_align = 0;
3444 /* If DEST is not a pointer type, call the normal function. */
3445 if (dest_align == 0)
3448 /* If either SRC is not a pointer type, don't do this
3449 operation in-line. */
3453 if (currently_expanding_gimple_stmt)
3454 stringop_block_profile (currently_expanding_gimple_stmt,
3455 &expected_align, &expected_size);
3457 if (expected_align < dest_align)
3458 expected_align = dest_align;
3459 dest_mem = get_memory_rtx (dest, len);
3460 set_mem_align (dest_mem, dest_align);
3461 len_rtx = expand_normal (len);
3462 src_str = c_getstr (src);
3464 /* If SRC is a string constant and block move would be done
3465 by pieces, we can avoid loading the string from memory
3466 and only stored the computed constants. */
3468 && CONST_INT_P (len_rtx)
3469 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3470 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3471 CONST_CAST (char *, src_str),
3474 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3475 builtin_memcpy_read_str,
3476 CONST_CAST (char *, src_str),
3477 dest_align, false, 0);
3478 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3479 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3483 src_mem = get_memory_rtx (src, len);
3484 set_mem_align (src_mem, src_align);
3486 /* Copy word part most expediently. */
3487 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3488 CALL_EXPR_TAILCALL (exp)
3489 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3490 expected_align, expected_size);
3494 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3495 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3501 /* Expand a call EXP to the mempcpy builtin.
3502 Return NULL_RTX if we failed; the caller should emit a normal call,
3503 otherwise try to get the result in TARGET, if convenient (and in
3504 mode MODE if that's convenient). If ENDP is 0 return the
3505 destination pointer, if ENDP is 1 return the end pointer ala
3506 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3510 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3512 if (!validate_arglist (exp,
3513 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3517 tree dest = CALL_EXPR_ARG (exp, 0);
3518 tree src = CALL_EXPR_ARG (exp, 1);
3519 tree len = CALL_EXPR_ARG (exp, 2);
3520 return expand_builtin_mempcpy_args (dest, src, len,
3521 target, mode, /*endp=*/ 1);
3525 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3526 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3527 so that this can also be called without constructing an actual CALL_EXPR.
3528 The other arguments and return value are the same as for
3529 expand_builtin_mempcpy. */
3532 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3533 rtx target, enum machine_mode mode, int endp)
3535 /* If return value is ignored, transform mempcpy into memcpy. */
3536 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3538 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3539 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3541 return expand_expr (result, target, mode, EXPAND_NORMAL);
3545 const char *src_str;
3546 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3547 unsigned int dest_align
3548 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3549 rtx dest_mem, src_mem, len_rtx;
3551 /* If either SRC or DEST is not a pointer type, don't do this
3552 operation in-line. */
3553 if (dest_align == 0 || src_align == 0)
3556 /* If LEN is not constant, call the normal function. */
3557 if (! host_integerp (len, 1))
3560 len_rtx = expand_normal (len);
3561 src_str = c_getstr (src);
3563 /* If SRC is a string constant and block move would be done
3564 by pieces, we can avoid loading the string from memory
3565 and only stored the computed constants. */
3567 && CONST_INT_P (len_rtx)
3568 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3569 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3570 CONST_CAST (char *, src_str),
3573 dest_mem = get_memory_rtx (dest, len);
3574 set_mem_align (dest_mem, dest_align);
3575 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3576 builtin_memcpy_read_str,
3577 CONST_CAST (char *, src_str),
3578 dest_align, false, endp);
3579 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3580 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3584 if (CONST_INT_P (len_rtx)
3585 && can_move_by_pieces (INTVAL (len_rtx),
3586 MIN (dest_align, src_align)))
3588 dest_mem = get_memory_rtx (dest, len);
3589 set_mem_align (dest_mem, dest_align);
3590 src_mem = get_memory_rtx (src, len);
3591 set_mem_align (src_mem, src_align);
3592 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3593 MIN (dest_align, src_align), endp);
3594 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3595 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3604 # define HAVE_movstr 0
3605 # define CODE_FOR_movstr CODE_FOR_nothing
3608 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3609 we failed, the caller should emit a normal call, otherwise try to
3610 get the result in TARGET, if convenient. If ENDP is 0 return the
3611 destination pointer, if ENDP is 1 return the end pointer ala
3612 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3616 expand_movstr (tree dest, tree src, rtx target, int endp)
3622 const struct insn_data_d * data;
3627 dest_mem = get_memory_rtx (dest, NULL);
3628 src_mem = get_memory_rtx (src, NULL);
3629 data = insn_data + CODE_FOR_movstr;
3632 target = force_reg (Pmode, XEXP (dest_mem, 0));
3633 dest_mem = replace_equiv_address (dest_mem, target);
3634 end = gen_reg_rtx (Pmode);
3639 || target == const0_rtx
3640 || ! (*data->operand[0].predicate) (target, Pmode))
3642 end = gen_reg_rtx (Pmode);
3643 if (target != const0_rtx)
3650 if (data->operand[0].mode != VOIDmode)
3651 end = gen_lowpart (data->operand[0].mode, end);
3653 insn = data->genfun (end, dest_mem, src_mem);
3659 /* movstr is supposed to set end to the address of the NUL
3660 terminator. If the caller requested a mempcpy-like return value,
3662 if (endp == 1 && target != const0_rtx)
3664 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3665 emit_move_insn (target, force_operand (tem, NULL_RTX));
3671 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3672 NULL_RTX if we failed the caller should emit a normal call, otherwise
3673 try to get the result in TARGET, if convenient (and in mode MODE if that's
3677 expand_builtin_strcpy (tree exp, rtx target)
3679 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3681 tree dest = CALL_EXPR_ARG (exp, 0);
3682 tree src = CALL_EXPR_ARG (exp, 1);
3683 return expand_builtin_strcpy_args (dest, src, target);
3688 /* Helper function to do the actual work for expand_builtin_strcpy. The
3689 arguments to the builtin_strcpy call DEST and SRC are broken out
3690 so that this can also be called without constructing an actual CALL_EXPR.
3691 The other arguments and return value are the same as for
3692 expand_builtin_strcpy. */
3695 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3697 return expand_movstr (dest, src, target, /*endp=*/0);
3700 /* Expand a call EXP to the stpcpy builtin.
3701 Return NULL_RTX if we failed the caller should emit a normal call,
3702 otherwise try to get the result in TARGET, if convenient (and in
3703 mode MODE if that's convenient). */
3706 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3709 location_t loc = EXPR_LOCATION (exp);
3711 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3714 dst = CALL_EXPR_ARG (exp, 0);
3715 src = CALL_EXPR_ARG (exp, 1);
3717 /* If return value is ignored, transform stpcpy into strcpy. */
3718 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3720 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3721 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3722 return expand_expr (result, target, mode, EXPAND_NORMAL);
3729 /* Ensure we get an actual string whose length can be evaluated at
3730 compile-time, not an expression containing a string. This is
3731 because the latter will potentially produce pessimized code
3732 when used to produce the return value. */
3733 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3734 return expand_movstr (dst, src, target, /*endp=*/2);
3736 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3737 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3738 target, mode, /*endp=*/2);
3743 if (TREE_CODE (len) == INTEGER_CST)
3745 rtx len_rtx = expand_normal (len);
3747 if (CONST_INT_P (len_rtx))
3749 ret = expand_builtin_strcpy_args (dst, src, target);
3755 if (mode != VOIDmode)
3756 target = gen_reg_rtx (mode);
3758 target = gen_reg_rtx (GET_MODE (ret));
3760 if (GET_MODE (target) != GET_MODE (ret))
3761 ret = gen_lowpart (GET_MODE (target), ret);
3763 ret = plus_constant (ret, INTVAL (len_rtx));
3764 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3772 return expand_movstr (dst, src, target, /*endp=*/2);
3776 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3777 bytes from constant string DATA + OFFSET and return it as target
3781 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3782 enum machine_mode mode)
3784 const char *str = (const char *) data;
3786 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3789 return c_readstr (str + offset, mode);
3792 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3793 NULL_RTX if we failed the caller should emit a normal call. */
3796 expand_builtin_strncpy (tree exp, rtx target)
3798 location_t loc = EXPR_LOCATION (exp);
3800 if (validate_arglist (exp,
3801 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3803 tree dest = CALL_EXPR_ARG (exp, 0);
3804 tree src = CALL_EXPR_ARG (exp, 1);
3805 tree len = CALL_EXPR_ARG (exp, 2);
3806 tree slen = c_strlen (src, 1);
3808 /* We must be passed a constant len and src parameter. */
3809 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3812 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3814 /* We're required to pad with trailing zeros if the requested
3815 len is greater than strlen(s2)+1. In that case try to
3816 use store_by_pieces, if it fails, punt. */
3817 if (tree_int_cst_lt (slen, len))
3819 unsigned int dest_align
3820 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3821 const char *p = c_getstr (src);
3824 if (!p || dest_align == 0 || !host_integerp (len, 1)
3825 || !can_store_by_pieces (tree_low_cst (len, 1),
3826 builtin_strncpy_read_str,
3827 CONST_CAST (char *, p),
3831 dest_mem = get_memory_rtx (dest, len);
3832 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3833 builtin_strncpy_read_str,
3834 CONST_CAST (char *, p), dest_align, false, 0);
3835 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3836 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3843 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3844 bytes from constant string DATA + OFFSET and return it as target
3848 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3849 enum machine_mode mode)
3851 const char *c = (const char *) data;
3852 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3854 memset (p, *c, GET_MODE_SIZE (mode));
3856 return c_readstr (p, mode);
3859 /* Callback routine for store_by_pieces. Return the RTL of a register
3860 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3861 char value given in the RTL register data. For example, if mode is
3862 4 bytes wide, return the RTL for 0x01010101*data. */
3865 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3866 enum machine_mode mode)
3872 size = GET_MODE_SIZE (mode);
3876 p = XALLOCAVEC (char, size);
3877 memset (p, 1, size);
3878 coeff = c_readstr (p, mode);
3880 target = convert_to_mode (mode, (rtx) data, 1);
3881 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3882 return force_reg (mode, target);
3885 /* Expand expression EXP, which is a call to the memset builtin. Return
3886 NULL_RTX if we failed the caller should emit a normal call, otherwise
3887 try to get the result in TARGET, if convenient (and in mode MODE if that's
3891 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3893 if (!validate_arglist (exp,
3894 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3898 tree dest = CALL_EXPR_ARG (exp, 0);
3899 tree val = CALL_EXPR_ARG (exp, 1);
3900 tree len = CALL_EXPR_ARG (exp, 2);
3901 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3905 /* Helper function to do the actual work for expand_builtin_memset. The
3906 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3907 so that this can also be called without constructing an actual CALL_EXPR.
3908 The other arguments and return value are the same as for
3909 expand_builtin_memset. */
3912 expand_builtin_memset_args (tree dest, tree val, tree len,
3913 rtx target, enum machine_mode mode, tree orig_exp)
3916 enum built_in_function fcode;
3918 unsigned int dest_align;
3919 rtx dest_mem, dest_addr, len_rtx;
3920 HOST_WIDE_INT expected_size = -1;
3921 unsigned int expected_align = 0;
3923 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3925 /* If DEST is not a pointer type, don't do this operation in-line. */
3926 if (dest_align == 0)
3929 if (currently_expanding_gimple_stmt)
3930 stringop_block_profile (currently_expanding_gimple_stmt,
3931 &expected_align, &expected_size);
3933 if (expected_align < dest_align)
3934 expected_align = dest_align;
3936 /* If the LEN parameter is zero, return DEST. */
3937 if (integer_zerop (len))
3939 /* Evaluate and ignore VAL in case it has side-effects. */
3940 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3941 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3944 /* Stabilize the arguments in case we fail. */
3945 dest = builtin_save_expr (dest);
3946 val = builtin_save_expr (val);
3947 len = builtin_save_expr (len);
3949 len_rtx = expand_normal (len);
3950 dest_mem = get_memory_rtx (dest, len);
3952 if (TREE_CODE (val) != INTEGER_CST)
3956 val_rtx = expand_normal (val);
3957 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3960 /* Assume that we can memset by pieces if we can store
3961 * the coefficients by pieces (in the required modes).
3962 * We can't pass builtin_memset_gen_str as that emits RTL. */
3964 if (host_integerp (len, 1)
3965 && can_store_by_pieces (tree_low_cst (len, 1),
3966 builtin_memset_read_str, &c, dest_align,
3969 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3971 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3972 builtin_memset_gen_str, val_rtx, dest_align,
3975 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3976 dest_align, expected_align,
3980 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3981 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3985 if (target_char_cast (val, &c))
3990 if (host_integerp (len, 1)
3991 && can_store_by_pieces (tree_low_cst (len, 1),
3992 builtin_memset_read_str, &c, dest_align,
3994 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3995 builtin_memset_read_str, &c, dest_align, true, 0);
3996 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3997 dest_align, expected_align,
4001 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4002 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4006 set_mem_align (dest_mem, dest_align);
4007 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4008 CALL_EXPR_TAILCALL (orig_exp)
4009 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4010 expected_align, expected_size);
4014 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4015 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4021 fndecl = get_callee_fndecl (orig_exp);
4022 fcode = DECL_FUNCTION_CODE (fndecl);
4023 if (fcode == BUILT_IN_MEMSET)
4024 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4026 else if (fcode == BUILT_IN_BZERO)
4027 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4031 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4032 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4033 return expand_call (fn, target, target == const0_rtx);
4036 /* Expand expression EXP, which is a call to the bzero builtin. Return
4037 NULL_RTX if we failed the caller should emit a normal call. */
4040 expand_builtin_bzero (tree exp)
4043 location_t loc = EXPR_LOCATION (exp);
4045 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4048 dest = CALL_EXPR_ARG (exp, 0);
4049 size = CALL_EXPR_ARG (exp, 1);
4051 /* New argument list transforming bzero(ptr x, int y) to
4052 memset(ptr x, int 0, size_t y). This is done this way
4053 so that if it isn't expanded inline, we fallback to
4054 calling bzero instead of memset. */
4056 return expand_builtin_memset_args (dest, integer_zero_node,
4057 fold_convert_loc (loc, sizetype, size),
4058 const0_rtx, VOIDmode, exp);
4061 /* Expand expression EXP, which is a call to the memcmp built-in function.
4062 Return NULL_RTX if we failed and the
4063 caller should emit a normal call, otherwise try to get the result in
4064 TARGET, if convenient (and in mode MODE, if that's convenient). */
4067 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4068 ATTRIBUTE_UNUSED enum machine_mode mode)
4070 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4072 if (!validate_arglist (exp,
4073 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4076 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4078 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4081 tree arg1 = CALL_EXPR_ARG (exp, 0);
4082 tree arg2 = CALL_EXPR_ARG (exp, 1);
4083 tree len = CALL_EXPR_ARG (exp, 2);
4085 unsigned int arg1_align
4086 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4087 unsigned int arg2_align
4088 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4089 enum machine_mode insn_mode;
4091 #ifdef HAVE_cmpmemsi
4093 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4096 #ifdef HAVE_cmpstrnsi
4098 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4103 /* If we don't have POINTER_TYPE, call the function. */
4104 if (arg1_align == 0 || arg2_align == 0)
4107 /* Make a place to write the result of the instruction. */
4110 && REG_P (result) && GET_MODE (result) == insn_mode
4111 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4112 result = gen_reg_rtx (insn_mode);
4114 arg1_rtx = get_memory_rtx (arg1, len);
4115 arg2_rtx = get_memory_rtx (arg2, len);
4116 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4118 /* Set MEM_SIZE as appropriate. */
4119 if (CONST_INT_P (arg3_rtx))
4121 set_mem_size (arg1_rtx, arg3_rtx);
4122 set_mem_size (arg2_rtx, arg3_rtx);
4125 #ifdef HAVE_cmpmemsi
4127 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4128 GEN_INT (MIN (arg1_align, arg2_align)));
4131 #ifdef HAVE_cmpstrnsi
4133 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4134 GEN_INT (MIN (arg1_align, arg2_align)));
4142 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4143 TYPE_MODE (integer_type_node), 3,
4144 XEXP (arg1_rtx, 0), Pmode,
4145 XEXP (arg2_rtx, 0), Pmode,
4146 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4147 TYPE_UNSIGNED (sizetype)),
4148 TYPE_MODE (sizetype));
4150 /* Return the value in the proper mode for this function. */
4151 mode = TYPE_MODE (TREE_TYPE (exp));
4152 if (GET_MODE (result) == mode)
4154 else if (target != 0)
4156 convert_move (target, result, 0);
4160 return convert_to_mode (mode, result, 0);
4167 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4168 if we failed the caller should emit a normal call, otherwise try to get
4169 the result in TARGET, if convenient. */
4172 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4174 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4177 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4178 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4179 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4181 rtx arg1_rtx, arg2_rtx;
4182 rtx result, insn = NULL_RTX;
4184 tree arg1 = CALL_EXPR_ARG (exp, 0);
4185 tree arg2 = CALL_EXPR_ARG (exp, 1);
4187 unsigned int arg1_align
4188 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4189 unsigned int arg2_align
4190 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4192 /* If we don't have POINTER_TYPE, call the function. */
4193 if (arg1_align == 0 || arg2_align == 0)
4196 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4197 arg1 = builtin_save_expr (arg1);
4198 arg2 = builtin_save_expr (arg2);
4200 arg1_rtx = get_memory_rtx (arg1, NULL);
4201 arg2_rtx = get_memory_rtx (arg2, NULL);
4203 #ifdef HAVE_cmpstrsi
4204 /* Try to call cmpstrsi. */
4207 enum machine_mode insn_mode
4208 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4210 /* Make a place to write the result of the instruction. */
4213 && REG_P (result) && GET_MODE (result) == insn_mode
4214 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4215 result = gen_reg_rtx (insn_mode);
4217 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4218 GEN_INT (MIN (arg1_align, arg2_align)));
4221 #ifdef HAVE_cmpstrnsi
4222 /* Try to determine at least one length and call cmpstrnsi. */
4223 if (!insn && HAVE_cmpstrnsi)
4228 enum machine_mode insn_mode
4229 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4230 tree len1 = c_strlen (arg1, 1);
4231 tree len2 = c_strlen (arg2, 1);
4234 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4236 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4238 /* If we don't have a constant length for the first, use the length
4239 of the second, if we know it. We don't require a constant for
4240 this case; some cost analysis could be done if both are available
4241 but neither is constant. For now, assume they're equally cheap,
4242 unless one has side effects. If both strings have constant lengths,
4249 else if (TREE_SIDE_EFFECTS (len1))
4251 else if (TREE_SIDE_EFFECTS (len2))
4253 else if (TREE_CODE (len1) != INTEGER_CST)
4255 else if (TREE_CODE (len2) != INTEGER_CST)
4257 else if (tree_int_cst_lt (len1, len2))
4262 /* If both arguments have side effects, we cannot optimize. */
4263 if (!len || TREE_SIDE_EFFECTS (len))
4266 arg3_rtx = expand_normal (len);
4268 /* Make a place to write the result of the instruction. */
4271 && REG_P (result) && GET_MODE (result) == insn_mode
4272 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4273 result = gen_reg_rtx (insn_mode);
4275 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4276 GEN_INT (MIN (arg1_align, arg2_align)));
4282 enum machine_mode mode;
4285 /* Return the value in the proper mode for this function. */
4286 mode = TYPE_MODE (TREE_TYPE (exp));
4287 if (GET_MODE (result) == mode)
4290 return convert_to_mode (mode, result, 0);
4291 convert_move (target, result, 0);
4295 /* Expand the library call ourselves using a stabilized argument
4296 list to avoid re-evaluating the function's arguments twice. */
4297 #ifdef HAVE_cmpstrnsi
4300 fndecl = get_callee_fndecl (exp);
4301 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4302 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4303 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4304 return expand_call (fn, target, target == const0_rtx);
4310 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4311 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4312 the result in TARGET, if convenient. */
4315 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4316 ATTRIBUTE_UNUSED enum machine_mode mode)
4318 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4320 if (!validate_arglist (exp,
4321 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4324 /* If c_strlen can determine an expression for one of the string
4325 lengths, and it doesn't have side effects, then emit cmpstrnsi
4326 using length MIN(strlen(string)+1, arg3). */
4327 #ifdef HAVE_cmpstrnsi
4330 tree len, len1, len2;
4331 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4334 tree arg1 = CALL_EXPR_ARG (exp, 0);
4335 tree arg2 = CALL_EXPR_ARG (exp, 1);
4336 tree arg3 = CALL_EXPR_ARG (exp, 2);
4338 unsigned int arg1_align
4339 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4340 unsigned int arg2_align
4341 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4342 enum machine_mode insn_mode
4343 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4345 len1 = c_strlen (arg1, 1);
4346 len2 = c_strlen (arg2, 1);
4349 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4351 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4353 /* If we don't have a constant length for the first, use the length
4354 of the second, if we know it. We don't require a constant for
4355 this case; some cost analysis could be done if both are available
4356 but neither is constant. For now, assume they're equally cheap,
4357 unless one has side effects. If both strings have constant lengths,
4364 else if (TREE_SIDE_EFFECTS (len1))
4366 else if (TREE_SIDE_EFFECTS (len2))
4368 else if (TREE_CODE (len1) != INTEGER_CST)
4370 else if (TREE_CODE (len2) != INTEGER_CST)
4372 else if (tree_int_cst_lt (len1, len2))
4377 /* If both arguments have side effects, we cannot optimize. */
4378 if (!len || TREE_SIDE_EFFECTS (len))
4381 /* The actual new length parameter is MIN(len,arg3). */
4382 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4383 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4385 /* If we don't have POINTER_TYPE, call the function. */
4386 if (arg1_align == 0 || arg2_align == 0)
4389 /* Make a place to write the result of the instruction. */
4392 && REG_P (result) && GET_MODE (result) == insn_mode
4393 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4394 result = gen_reg_rtx (insn_mode);
4396 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4397 arg1 = builtin_save_expr (arg1);
4398 arg2 = builtin_save_expr (arg2);
4399 len = builtin_save_expr (len);
4401 arg1_rtx = get_memory_rtx (arg1, len);
4402 arg2_rtx = get_memory_rtx (arg2, len);
4403 arg3_rtx = expand_normal (len);
4404 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4405 GEN_INT (MIN (arg1_align, arg2_align)));
4410 /* Return the value in the proper mode for this function. */
4411 mode = TYPE_MODE (TREE_TYPE (exp));
4412 if (GET_MODE (result) == mode)
4415 return convert_to_mode (mode, result, 0);
4416 convert_move (target, result, 0);
4420 /* Expand the library call ourselves using a stabilized argument
4421 list to avoid re-evaluating the function's arguments twice. */
4422 fndecl = get_callee_fndecl (exp);
4423 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4425 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4426 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4427 return expand_call (fn, target, target == const0_rtx);
4433 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4434 if that's convenient. */
4437 expand_builtin_saveregs (void)
4441 /* Don't do __builtin_saveregs more than once in a function.
4442 Save the result of the first call and reuse it. */
4443 if (saveregs_value != 0)
4444 return saveregs_value;
4446 /* When this function is called, it means that registers must be
4447 saved on entry to this function. So we migrate the call to the
4448 first insn of this function. */
4452 /* Do whatever the machine needs done in this case. */
4453 val = targetm.calls.expand_builtin_saveregs ();
4458 saveregs_value = val;
4460 /* Put the insns after the NOTE that starts the function. If this
4461 is inside a start_sequence, make the outer-level insn chain current, so
4462 the code is placed at the start of the function. */
4463 push_topmost_sequence ();
4464 emit_insn_after (seq, entry_of_function ());
4465 pop_topmost_sequence ();
4470 /* Expand a call to __builtin_next_arg. */
4473 expand_builtin_next_arg (void)
4475 /* Checking arguments is already done in fold_builtin_next_arg
4476 that must be called before this function. */
4477 return expand_binop (ptr_mode, add_optab,
4478 crtl->args.internal_arg_pointer,
4479 crtl->args.arg_offset_rtx,
4480 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4483 /* Make it easier for the backends by protecting the valist argument
4484 from multiple evaluations. */
4487 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4489 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4491 /* The current way of determining the type of valist is completely
4492 bogus. We should have the information on the va builtin instead. */
4494 vatype = targetm.fn_abi_va_list (cfun->decl);
4496 if (TREE_CODE (vatype) == ARRAY_TYPE)
4498 if (TREE_SIDE_EFFECTS (valist))
4499 valist = save_expr (valist);
4501 /* For this case, the backends will be expecting a pointer to
4502 vatype, but it's possible we've actually been given an array
4503 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4505 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4507 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4508 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4513 tree pt = build_pointer_type (vatype);
4517 if (! TREE_SIDE_EFFECTS (valist))
4520 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4521 TREE_SIDE_EFFECTS (valist) = 1;
4524 if (TREE_SIDE_EFFECTS (valist))
4525 valist = save_expr (valist);
4526 valist = fold_build2_loc (loc, MEM_REF,
4527 vatype, valist, build_int_cst (pt, 0));
4533 /* The "standard" definition of va_list is void*. */
4536 std_build_builtin_va_list (void)
4538 return ptr_type_node;
4541 /* The "standard" abi va_list is va_list_type_node. */
4544 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4546 return va_list_type_node;
4549 /* The "standard" type of va_list is va_list_type_node. */
4552 std_canonical_va_list_type (tree type)
4556 if (INDIRECT_REF_P (type))
4557 type = TREE_TYPE (type);
4558 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4559 type = TREE_TYPE (type);
4560 wtype = va_list_type_node;
4562 /* Treat structure va_list types. */
4563 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4564 htype = TREE_TYPE (htype);
4565 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4567 /* If va_list is an array type, the argument may have decayed
4568 to a pointer type, e.g. by being passed to another function.
4569 In that case, unwrap both types so that we can compare the
4570 underlying records. */
4571 if (TREE_CODE (htype) == ARRAY_TYPE
4572 || POINTER_TYPE_P (htype))
4574 wtype = TREE_TYPE (wtype);
4575 htype = TREE_TYPE (htype);
4578 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4579 return va_list_type_node;
4584 /* The "standard" implementation of va_start: just assign `nextarg' to
4588 std_expand_builtin_va_start (tree valist, rtx nextarg)
4590 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4591 convert_move (va_r, nextarg, 0);
4594 /* Expand EXP, a call to __builtin_va_start. */
4597 expand_builtin_va_start (tree exp)
4601 location_t loc = EXPR_LOCATION (exp);
4603 if (call_expr_nargs (exp) < 2)
4605 error_at (loc, "too few arguments to function %<va_start%>");
4609 if (fold_builtin_next_arg (exp, true))
4612 nextarg = expand_builtin_next_arg ();
4613 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4615 if (targetm.expand_builtin_va_start)
4616 targetm.expand_builtin_va_start (valist, nextarg);
4618 std_expand_builtin_va_start (valist, nextarg);
4623 /* The "standard" implementation of va_arg: read the value from the
4624 current (padded) address and increment by the (padded) size. */
4627 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4630 tree addr, t, type_size, rounded_size, valist_tmp;
4631 unsigned HOST_WIDE_INT align, boundary;
4634 #ifdef ARGS_GROW_DOWNWARD
4635 /* All of the alignment and movement below is for args-grow-up machines.
4636 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4637 implement their own specialized gimplify_va_arg_expr routines. */
4641 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4643 type = build_pointer_type (type);
4645 align = PARM_BOUNDARY / BITS_PER_UNIT;
4646 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4648 /* When we align parameter on stack for caller, if the parameter
4649 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4650 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4651 here with caller. */
4652 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4653 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4655 boundary /= BITS_PER_UNIT;
4657 /* Hoist the valist value into a temporary for the moment. */
4658 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4660 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4661 requires greater alignment, we must perform dynamic alignment. */
4662 if (boundary > align
4663 && !integer_zerop (TYPE_SIZE (type)))
4665 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4666 fold_build2 (POINTER_PLUS_EXPR,
4668 valist_tmp, size_int (boundary - 1)));
4669 gimplify_and_add (t, pre_p);
4671 t = fold_convert (sizetype, valist_tmp);
4672 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4673 fold_convert (TREE_TYPE (valist),
4674 fold_build2 (BIT_AND_EXPR, sizetype, t,
4675 size_int (-boundary))));
4676 gimplify_and_add (t, pre_p);
4681 /* If the actual alignment is less than the alignment of the type,
4682 adjust the type accordingly so that we don't assume strict alignment
4683 when dereferencing the pointer. */
4684 boundary *= BITS_PER_UNIT;
4685 if (boundary < TYPE_ALIGN (type))
4687 type = build_variant_type_copy (type);
4688 TYPE_ALIGN (type) = boundary;
4691 /* Compute the rounded size of the type. */
4692 type_size = size_in_bytes (type);
4693 rounded_size = round_up (type_size, align);
4695 /* Reduce rounded_size so it's sharable with the postqueue. */
4696 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4700 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4702 /* Small args are padded downward. */
4703 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4704 rounded_size, size_int (align));
4705 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4706 size_binop (MINUS_EXPR, rounded_size, type_size));
4707 addr = fold_build2 (POINTER_PLUS_EXPR,
4708 TREE_TYPE (addr), addr, t);
4711 /* Compute new value for AP. */
4712 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4713 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4714 gimplify_and_add (t, pre_p);
4716 addr = fold_convert (build_pointer_type (type), addr);
4719 addr = build_va_arg_indirect_ref (addr);
4721 return build_va_arg_indirect_ref (addr);
4724 /* Build an indirect-ref expression over the given TREE, which represents a
4725 piece of a va_arg() expansion. */
4727 build_va_arg_indirect_ref (tree addr)
4729 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4731 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4737 /* Return a dummy expression of type TYPE in order to keep going after an
4741 dummy_object (tree type)
4743 tree t = build_int_cst (build_pointer_type (type), 0);
4744 return build1 (INDIRECT_REF, type, t);
4747 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4748 builtin function, but a very special sort of operator. */
4750 enum gimplify_status
4751 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4753 tree promoted_type, have_va_type;
4754 tree valist = TREE_OPERAND (*expr_p, 0);
4755 tree type = TREE_TYPE (*expr_p);
4757 location_t loc = EXPR_LOCATION (*expr_p);
4759 /* Verify that valist is of the proper type. */
4760 have_va_type = TREE_TYPE (valist);
4761 if (have_va_type == error_mark_node)
4763 have_va_type = targetm.canonical_va_list_type (have_va_type);
4765 if (have_va_type == NULL_TREE)
4767 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4771 /* Generate a diagnostic for requesting data of a type that cannot
4772 be passed through `...' due to type promotion at the call site. */
4773 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4776 static bool gave_help;
4779 /* Unfortunately, this is merely undefined, rather than a constraint
4780 violation, so we cannot make this an error. If this call is never
4781 executed, the program is still strictly conforming. */
4782 warned = warning_at (loc, 0,
4783 "%qT is promoted to %qT when passed through %<...%>",
4784 type, promoted_type);
4785 if (!gave_help && warned)
4788 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4789 promoted_type, type);
4792 /* We can, however, treat "undefined" any way we please.
4793 Call abort to encourage the user to fix the program. */
4795 inform (loc, "if this code is reached, the program will abort");
4796 /* Before the abort, allow the evaluation of the va_list
4797 expression to exit or longjmp. */
4798 gimplify_and_add (valist, pre_p);
4799 t = build_call_expr_loc (loc,
4800 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4801 gimplify_and_add (t, pre_p);
4803 /* This is dead code, but go ahead and finish so that the
4804 mode of the result comes out right. */
4805 *expr_p = dummy_object (type);
4810 /* Make it easier for the backends by protecting the valist argument
4811 from multiple evaluations. */
4812 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4814 /* For this case, the backends will be expecting a pointer to
4815 TREE_TYPE (abi), but it's possible we've
4816 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4818 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4820 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4821 valist = fold_convert_loc (loc, p1,
4822 build_fold_addr_expr_loc (loc, valist));
4825 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4828 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4830 if (!targetm.gimplify_va_arg_expr)
4831 /* FIXME: Once most targets are converted we should merely
4832 assert this is non-null. */
4835 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4840 /* Expand EXP, a call to __builtin_va_end. */
4843 expand_builtin_va_end (tree exp)
4845 tree valist = CALL_EXPR_ARG (exp, 0);
4847 /* Evaluate for side effects, if needed. I hate macros that don't
4849 if (TREE_SIDE_EFFECTS (valist))
4850 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4855 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4856 builtin rather than just as an assignment in stdarg.h because of the
4857 nastiness of array-type va_list types. */
4860 expand_builtin_va_copy (tree exp)
4863 location_t loc = EXPR_LOCATION (exp);
4865 dst = CALL_EXPR_ARG (exp, 0);
4866 src = CALL_EXPR_ARG (exp, 1);
4868 dst = stabilize_va_list_loc (loc, dst, 1);
4869 src = stabilize_va_list_loc (loc, src, 0);
4871 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4873 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4875 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4876 TREE_SIDE_EFFECTS (t) = 1;
4877 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4881 rtx dstb, srcb, size;
4883 /* Evaluate to pointers. */
4884 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4885 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4886 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4887 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4889 dstb = convert_memory_address (Pmode, dstb);
4890 srcb = convert_memory_address (Pmode, srcb);
4892 /* "Dereference" to BLKmode memories. */
4893 dstb = gen_rtx_MEM (BLKmode, dstb);
4894 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4895 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4896 srcb = gen_rtx_MEM (BLKmode, srcb);
4897 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4898 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4901 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4907 /* Expand a call to one of the builtin functions __builtin_frame_address or
4908 __builtin_return_address. */
4911 expand_builtin_frame_address (tree fndecl, tree exp)
4913 /* The argument must be a nonnegative integer constant.
4914 It counts the number of frames to scan up the stack.
4915 The value is the return address saved in that frame. */
4916 if (call_expr_nargs (exp) == 0)
4917 /* Warning about missing arg was already issued. */
4919 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4921 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4922 error ("invalid argument to %<__builtin_frame_address%>");
4924 error ("invalid argument to %<__builtin_return_address%>");
4930 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4931 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4933 /* Some ports cannot access arbitrary stack frames. */
4936 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4937 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4939 warning (0, "unsupported argument to %<__builtin_return_address%>");
4943 /* For __builtin_frame_address, return what we've got. */
4944 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4948 && ! CONSTANT_P (tem))
4949 tem = copy_to_mode_reg (Pmode, tem);
4954 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4955 failed and the caller should emit a normal call, otherwise try to
4956 get the result in TARGET, if convenient. CANNOT_ACCUMULATE is the
4957 same as for allocate_dynamic_stack_space. */
4960 expand_builtin_alloca (tree exp, rtx target, bool cannot_accumulate)
4965 /* Emit normal call if marked not-inlineable. */
4966 if (CALL_CANNOT_INLINE_P (exp))
4969 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4972 /* Compute the argument. */
4973 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4975 /* Allocate the desired space. */
4976 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT,
4978 result = convert_memory_address (ptr_mode, result);
4983 /* Expand a call to a bswap builtin with argument ARG0. MODE
4984 is the mode to expand with. */
4987 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4989 enum machine_mode mode;
4993 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4996 arg = CALL_EXPR_ARG (exp, 0);
4997 mode = TYPE_MODE (TREE_TYPE (arg));
4998 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5000 target = expand_unop (mode, bswap_optab, op0, target, 1);
5002 gcc_assert (target);
5004 return convert_to_mode (mode, target, 0);
5007 /* Expand a call to a unary builtin in EXP.
5008 Return NULL_RTX if a normal call should be emitted rather than expanding the
5009 function in-line. If convenient, the result should be placed in TARGET.
5010 SUBTARGET may be used as the target for computing one of EXP's operands. */
5013 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5014 rtx subtarget, optab op_optab)
5018 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5021 /* Compute the argument. */
5022 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5023 VOIDmode, EXPAND_NORMAL);
5024 /* Compute op, into TARGET if possible.
5025 Set TARGET to wherever the result comes back. */
5026 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5027 op_optab, op0, target, 1);
5028 gcc_assert (target);
5030 return convert_to_mode (target_mode, target, 0);
5033 /* Expand a call to __builtin_expect. We just return our argument
5034 as the builtin_expect semantic should've been already executed by
5035 tree branch prediction pass. */
5038 expand_builtin_expect (tree exp, rtx target)
5042 if (call_expr_nargs (exp) < 2)
5044 arg = CALL_EXPR_ARG (exp, 0);
5046 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5047 /* When guessing was done, the hints should be already stripped away. */
5048 gcc_assert (!flag_guess_branch_prob
5049 || optimize == 0 || seen_error ());
5054 expand_builtin_trap (void)
5058 emit_insn (gen_trap ());
5061 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5065 /* Expand a call to __builtin_unreachable. We do nothing except emit
5066 a barrier saying that control flow will not pass here.
5068 It is the responsibility of the program being compiled to ensure
5069 that control flow does never reach __builtin_unreachable. */
5071 expand_builtin_unreachable (void)
5076 /* Expand EXP, a call to fabs, fabsf or fabsl.
5077 Return NULL_RTX if a normal call should be emitted rather than expanding
5078 the function inline. If convenient, the result should be placed
5079 in TARGET. SUBTARGET may be used as the target for computing
5083 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5085 enum machine_mode mode;
5089 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5092 arg = CALL_EXPR_ARG (exp, 0);
5093 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5094 mode = TYPE_MODE (TREE_TYPE (arg));
5095 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5096 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5099 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5100 Return NULL is a normal call should be emitted rather than expanding the
5101 function inline. If convenient, the result should be placed in TARGET.
5102 SUBTARGET may be used as the target for computing the operand. */
5105 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5110 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5113 arg = CALL_EXPR_ARG (exp, 0);
5114 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5116 arg = CALL_EXPR_ARG (exp, 1);
5117 op1 = expand_normal (arg);
5119 return expand_copysign (op0, op1, target);
5122 /* Create a new constant string literal and return a char* pointer to it.
5123 The STRING_CST value is the LEN characters at STR. */
5125 build_string_literal (int len, const char *str)
5127 tree t, elem, index, type;
5129 t = build_string (len, str);
5130 elem = build_type_variant (char_type_node, 1, 0);
5131 index = build_index_type (size_int (len - 1));
5132 type = build_array_type (elem, index);
5133 TREE_TYPE (t) = type;
5134 TREE_CONSTANT (t) = 1;
5135 TREE_READONLY (t) = 1;
5136 TREE_STATIC (t) = 1;
5138 type = build_pointer_type (elem);
5139 t = build1 (ADDR_EXPR, type,
5140 build4 (ARRAY_REF, elem,
5141 t, integer_zero_node, NULL_TREE, NULL_TREE));
5145 /* Expand a call to either the entry or exit function profiler. */
5148 expand_builtin_profile_func (bool exitp)
5150 rtx this_rtx, which;
5152 this_rtx = DECL_RTL (current_function_decl);
5153 gcc_assert (MEM_P (this_rtx));
5154 this_rtx = XEXP (this_rtx, 0);
5157 which = profile_function_exit_libfunc;
5159 which = profile_function_entry_libfunc;
5161 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5162 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5169 /* Expand a call to __builtin___clear_cache. */
5172 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5174 #ifndef HAVE_clear_cache
5175 #ifdef CLEAR_INSN_CACHE
5176 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5177 does something. Just do the default expansion to a call to
5181 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5182 does nothing. There is no need to call it. Do nothing. */
5184 #endif /* CLEAR_INSN_CACHE */
5186 /* We have a "clear_cache" insn, and it will handle everything. */
5188 rtx begin_rtx, end_rtx;
5189 enum insn_code icode;
5191 /* We must not expand to a library call. If we did, any
5192 fallback library function in libgcc that might contain a call to
5193 __builtin___clear_cache() would recurse infinitely. */
5194 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5196 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5200 if (HAVE_clear_cache)
5202 icode = CODE_FOR_clear_cache;
5204 begin = CALL_EXPR_ARG (exp, 0);
5205 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5206 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5207 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5208 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5210 end = CALL_EXPR_ARG (exp, 1);
5211 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5212 end_rtx = convert_memory_address (Pmode, end_rtx);
5213 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5214 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5216 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5219 #endif /* HAVE_clear_cache */
5222 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5225 round_trampoline_addr (rtx tramp)
5227 rtx temp, addend, mask;
5229 /* If we don't need too much alignment, we'll have been guaranteed
5230 proper alignment by get_trampoline_type. */
5231 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5234 /* Round address up to desired boundary. */
5235 temp = gen_reg_rtx (Pmode);
5236 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5237 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5239 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5240 temp, 0, OPTAB_LIB_WIDEN);
5241 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5242 temp, 0, OPTAB_LIB_WIDEN);
5248 expand_builtin_init_trampoline (tree exp)
5250 tree t_tramp, t_func, t_chain;
5251 rtx m_tramp, r_tramp, r_chain, tmp;
5253 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5254 POINTER_TYPE, VOID_TYPE))
5257 t_tramp = CALL_EXPR_ARG (exp, 0);
5258 t_func = CALL_EXPR_ARG (exp, 1);
5259 t_chain = CALL_EXPR_ARG (exp, 2);
5261 r_tramp = expand_normal (t_tramp);
5262 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5263 MEM_NOTRAP_P (m_tramp) = 1;
5265 /* The TRAMP argument should be the address of a field within the
5266 local function's FRAME decl. Let's see if we can fill in the
5267 to fill in the MEM_ATTRs for this memory. */
5268 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5269 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
5272 tmp = round_trampoline_addr (r_tramp);
5275 m_tramp = change_address (m_tramp, BLKmode, tmp);
5276 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5277 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5280 /* The FUNC argument should be the address of the nested function.
5281 Extract the actual function decl to pass to the hook. */
5282 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5283 t_func = TREE_OPERAND (t_func, 0);
5284 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5286 r_chain = expand_normal (t_chain);
5288 /* Generate insns to initialize the trampoline. */
5289 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5291 trampolines_created = 1;
5293 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5294 "trampoline generated for nested function %qD", t_func);
5300 expand_builtin_adjust_trampoline (tree exp)
5304 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5307 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5308 tramp = round_trampoline_addr (tramp);
5309 if (targetm.calls.trampoline_adjust_address)
5310 tramp = targetm.calls.trampoline_adjust_address (tramp);
5315 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5316 function. The function first checks whether the back end provides
5317 an insn to implement signbit for the respective mode. If not, it
5318 checks whether the floating point format of the value is such that
5319 the sign bit can be extracted. If that is not the case, the
5320 function returns NULL_RTX to indicate that a normal call should be
5321 emitted rather than expanding the function in-line. EXP is the
5322 expression that is a call to the builtin function; if convenient,
5323 the result should be placed in TARGET. */
5325 expand_builtin_signbit (tree exp, rtx target)
5327 const struct real_format *fmt;
5328 enum machine_mode fmode, imode, rmode;
5331 enum insn_code icode;
5333 location_t loc = EXPR_LOCATION (exp);
5335 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5338 arg = CALL_EXPR_ARG (exp, 0);
5339 fmode = TYPE_MODE (TREE_TYPE (arg));
5340 rmode = TYPE_MODE (TREE_TYPE (exp));
5341 fmt = REAL_MODE_FORMAT (fmode);
5343 arg = builtin_save_expr (arg);
5345 /* Expand the argument yielding a RTX expression. */
5346 temp = expand_normal (arg);
5348 /* Check if the back end provides an insn that handles signbit for the
5350 icode = optab_handler (signbit_optab, fmode);
5351 if (icode != CODE_FOR_nothing)
5353 rtx last = get_last_insn ();
5354 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5355 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5357 delete_insns_since (last);
5360 /* For floating point formats without a sign bit, implement signbit
5362 bitpos = fmt->signbit_ro;
5365 /* But we can't do this if the format supports signed zero. */
5366 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5369 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5370 build_real (TREE_TYPE (arg), dconst0));
5371 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5374 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5376 imode = int_mode_for_mode (fmode);
5377 if (imode == BLKmode)
5379 temp = gen_lowpart (imode, temp);
5384 /* Handle targets with different FP word orders. */
5385 if (FLOAT_WORDS_BIG_ENDIAN)
5386 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5388 word = bitpos / BITS_PER_WORD;
5389 temp = operand_subword_force (temp, word, fmode);
5390 bitpos = bitpos % BITS_PER_WORD;
5393 /* Force the intermediate word_mode (or narrower) result into a
5394 register. This avoids attempting to create paradoxical SUBREGs
5395 of floating point modes below. */
5396 temp = force_reg (imode, temp);
5398 /* If the bitpos is within the "result mode" lowpart, the operation
5399 can be implement with a single bitwise AND. Otherwise, we need
5400 a right shift and an AND. */
5402 if (bitpos < GET_MODE_BITSIZE (rmode))
5404 double_int mask = double_int_setbit (double_int_zero, bitpos);
5406 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5407 temp = gen_lowpart (rmode, temp);
5408 temp = expand_binop (rmode, and_optab, temp,
5409 immed_double_int_const (mask, rmode),
5410 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5414 /* Perform a logical right shift to place the signbit in the least
5415 significant bit, then truncate the result to the desired mode
5416 and mask just this bit. */
5417 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5418 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5419 temp = gen_lowpart (rmode, temp);
5420 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5421 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5427 /* Expand fork or exec calls. TARGET is the desired target of the
5428 call. EXP is the call. FN is the
5429 identificator of the actual function. IGNORE is nonzero if the
5430 value is to be ignored. */
5433 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5438 /* If we are not profiling, just call the function. */
5439 if (!profile_arc_flag)
5442 /* Otherwise call the wrapper. This should be equivalent for the rest of
5443 compiler, so the code does not diverge, and the wrapper may run the
5444 code necessary for keeping the profiling sane. */
5446 switch (DECL_FUNCTION_CODE (fn))
5449 id = get_identifier ("__gcov_fork");
5452 case BUILT_IN_EXECL:
5453 id = get_identifier ("__gcov_execl");
5456 case BUILT_IN_EXECV:
5457 id = get_identifier ("__gcov_execv");
5460 case BUILT_IN_EXECLP:
5461 id = get_identifier ("__gcov_execlp");
5464 case BUILT_IN_EXECLE:
5465 id = get_identifier ("__gcov_execle");
5468 case BUILT_IN_EXECVP:
5469 id = get_identifier ("__gcov_execvp");
5472 case BUILT_IN_EXECVE:
5473 id = get_identifier ("__gcov_execve");
5480 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5481 FUNCTION_DECL, id, TREE_TYPE (fn));
5482 DECL_EXTERNAL (decl) = 1;
5483 TREE_PUBLIC (decl) = 1;
5484 DECL_ARTIFICIAL (decl) = 1;
5485 TREE_NOTHROW (decl) = 1;
5486 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5487 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5488 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5489 return expand_call (call, target, ignore);
5494 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5495 the pointer in these functions is void*, the tree optimizers may remove
5496 casts. The mode computed in expand_builtin isn't reliable either, due
5497 to __sync_bool_compare_and_swap.
5499 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5500 group of builtins. This gives us log2 of the mode size. */
5502 static inline enum machine_mode
5503 get_builtin_sync_mode (int fcode_diff)
5505 /* The size is not negotiable, so ask not to get BLKmode in return
5506 if the target indicates that a smaller size would be better. */
5507 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5510 /* Expand the memory expression LOC and return the appropriate memory operand
5511 for the builtin_sync operations. */
5514 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5518 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5519 addr = convert_memory_address (Pmode, addr);
5521 /* Note that we explicitly do not want any alias information for this
5522 memory, so that we kill all other live memories. Otherwise we don't
5523 satisfy the full barrier semantics of the intrinsic. */
5524 mem = validize_mem (gen_rtx_MEM (mode, addr));
5526 /* The alignment needs to be at least according to that of the mode. */
5527 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5528 get_pointer_alignment (loc, BIGGEST_ALIGNMENT)));
5529 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5530 MEM_VOLATILE_P (mem) = 1;
5535 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5536 EXP is the CALL_EXPR. CODE is the rtx code
5537 that corresponds to the arithmetic or logical operation from the name;
5538 an exception here is that NOT actually means NAND. TARGET is an optional
5539 place for us to store the results; AFTER is true if this is the
5540 fetch_and_xxx form. IGNORE is true if we don't actually care about
5541 the result of the operation at all. */
5544 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5545 enum rtx_code code, bool after,
5546 rtx target, bool ignore)
5549 enum machine_mode old_mode;
5550 location_t loc = EXPR_LOCATION (exp);
5552 if (code == NOT && warn_sync_nand)
5554 tree fndecl = get_callee_fndecl (exp);
5555 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5557 static bool warned_f_a_n, warned_n_a_f;
5561 case BUILT_IN_FETCH_AND_NAND_1:
5562 case BUILT_IN_FETCH_AND_NAND_2:
5563 case BUILT_IN_FETCH_AND_NAND_4:
5564 case BUILT_IN_FETCH_AND_NAND_8:
5565 case BUILT_IN_FETCH_AND_NAND_16:
5570 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5571 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5572 warned_f_a_n = true;
5575 case BUILT_IN_NAND_AND_FETCH_1:
5576 case BUILT_IN_NAND_AND_FETCH_2:
5577 case BUILT_IN_NAND_AND_FETCH_4:
5578 case BUILT_IN_NAND_AND_FETCH_8:
5579 case BUILT_IN_NAND_AND_FETCH_16:
5584 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5585 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5586 warned_n_a_f = true;
5594 /* Expand the operands. */
5595 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5597 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5598 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5599 of CONST_INTs, where we know the old_mode only from the call argument. */
5600 old_mode = GET_MODE (val);
5601 if (old_mode == VOIDmode)
5602 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5603 val = convert_modes (mode, old_mode, val, 1);
5606 return expand_sync_operation (mem, val, code);
5608 return expand_sync_fetch_operation (mem, val, code, after, target);
5611 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5612 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5613 true if this is the boolean form. TARGET is a place for us to store the
5614 results; this is NOT optional if IS_BOOL is true. */
5617 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5618 bool is_bool, rtx target)
5620 rtx old_val, new_val, mem;
5621 enum machine_mode old_mode;
5623 /* Expand the operands. */
5624 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5627 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5628 mode, EXPAND_NORMAL);
5629 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5630 of CONST_INTs, where we know the old_mode only from the call argument. */
5631 old_mode = GET_MODE (old_val);
5632 if (old_mode == VOIDmode)
5633 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5634 old_val = convert_modes (mode, old_mode, old_val, 1);
5636 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5637 mode, EXPAND_NORMAL);
5638 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5639 of CONST_INTs, where we know the old_mode only from the call argument. */
5640 old_mode = GET_MODE (new_val);
5641 if (old_mode == VOIDmode)
5642 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5643 new_val = convert_modes (mode, old_mode, new_val, 1);
5646 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5648 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5651 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5652 general form is actually an atomic exchange, and some targets only
5653 support a reduced form with the second argument being a constant 1.
5654 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5658 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5662 enum machine_mode old_mode;
5664 /* Expand the operands. */
5665 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5666 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5667 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5668 of CONST_INTs, where we know the old_mode only from the call argument. */
5669 old_mode = GET_MODE (val);
5670 if (old_mode == VOIDmode)
5671 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5672 val = convert_modes (mode, old_mode, val, 1);
5674 return expand_sync_lock_test_and_set (mem, val, target);
5677 /* Expand the __sync_synchronize intrinsic. */
5680 expand_builtin_synchronize (void)
5683 VEC (tree, gc) *v_clobbers;
5685 #ifdef HAVE_memory_barrier
5686 if (HAVE_memory_barrier)
5688 emit_insn (gen_memory_barrier ());
5693 if (synchronize_libfunc != NULL_RTX)
5695 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5699 /* If no explicit memory barrier instruction is available, create an
5700 empty asm stmt with a memory clobber. */
5701 v_clobbers = VEC_alloc (tree, gc, 1);
5702 VEC_quick_push (tree, v_clobbers,
5703 tree_cons (NULL, build_string (6, "memory"), NULL));
5704 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5705 gimple_asm_set_volatile (x, true);
5706 expand_asm_stmt (x);
5709 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5712 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5714 enum insn_code icode;
5716 rtx val = const0_rtx;
5718 /* Expand the operands. */
5719 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5721 /* If there is an explicit operation in the md file, use it. */
5722 icode = direct_optab_handler (sync_lock_release_optab, mode);
5723 if (icode != CODE_FOR_nothing)
5725 if (!insn_data[icode].operand[1].predicate (val, mode))
5726 val = force_reg (mode, val);
5728 insn = GEN_FCN (icode) (mem, val);
5736 /* Otherwise we can implement this operation by emitting a barrier
5737 followed by a store of zero. */
5738 expand_builtin_synchronize ();
5739 emit_move_insn (mem, val);
5742 /* Expand an expression EXP that calls a built-in function,
5743 with result going to TARGET if that's convenient
5744 (and in mode MODE if that's convenient).
5745 SUBTARGET may be used as the target for computing one of EXP's operands.
5746 IGNORE is nonzero if the value is to be ignored. */
5749 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5752 tree fndecl = get_callee_fndecl (exp);
5753 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5754 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5757 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5758 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5760 /* When not optimizing, generate calls to library functions for a certain
5763 && !called_as_built_in (fndecl)
5764 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5765 && fcode != BUILT_IN_ALLOCA
5766 && fcode != BUILT_IN_FREE)
5767 return expand_call (exp, target, ignore);
5769 /* The built-in function expanders test for target == const0_rtx
5770 to determine whether the function's result will be ignored. */
5772 target = const0_rtx;
5774 /* If the result of a pure or const built-in function is ignored, and
5775 none of its arguments are volatile, we can avoid expanding the
5776 built-in call and just evaluate the arguments for side-effects. */
5777 if (target == const0_rtx
5778 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5779 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5781 bool volatilep = false;
5783 call_expr_arg_iterator iter;
5785 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5786 if (TREE_THIS_VOLATILE (arg))
5794 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5795 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5802 CASE_FLT_FN (BUILT_IN_FABS):
5803 target = expand_builtin_fabs (exp, target, subtarget);
5808 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5809 target = expand_builtin_copysign (exp, target, subtarget);
5814 /* Just do a normal library call if we were unable to fold
5816 CASE_FLT_FN (BUILT_IN_CABS):
5819 CASE_FLT_FN (BUILT_IN_EXP):
5820 CASE_FLT_FN (BUILT_IN_EXP10):
5821 CASE_FLT_FN (BUILT_IN_POW10):
5822 CASE_FLT_FN (BUILT_IN_EXP2):
5823 CASE_FLT_FN (BUILT_IN_EXPM1):
5824 CASE_FLT_FN (BUILT_IN_LOGB):
5825 CASE_FLT_FN (BUILT_IN_LOG):
5826 CASE_FLT_FN (BUILT_IN_LOG10):
5827 CASE_FLT_FN (BUILT_IN_LOG2):
5828 CASE_FLT_FN (BUILT_IN_LOG1P):
5829 CASE_FLT_FN (BUILT_IN_TAN):
5830 CASE_FLT_FN (BUILT_IN_ASIN):
5831 CASE_FLT_FN (BUILT_IN_ACOS):
5832 CASE_FLT_FN (BUILT_IN_ATAN):
5833 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5834 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5835 because of possible accuracy problems. */
5836 if (! flag_unsafe_math_optimizations)
5838 CASE_FLT_FN (BUILT_IN_SQRT):
5839 CASE_FLT_FN (BUILT_IN_FLOOR):
5840 CASE_FLT_FN (BUILT_IN_CEIL):
5841 CASE_FLT_FN (BUILT_IN_TRUNC):
5842 CASE_FLT_FN (BUILT_IN_ROUND):
5843 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5844 CASE_FLT_FN (BUILT_IN_RINT):
5845 target = expand_builtin_mathfn (exp, target, subtarget);
5850 CASE_FLT_FN (BUILT_IN_ILOGB):
5851 if (! flag_unsafe_math_optimizations)
5853 CASE_FLT_FN (BUILT_IN_ISINF):
5854 CASE_FLT_FN (BUILT_IN_FINITE):
5855 case BUILT_IN_ISFINITE:
5856 case BUILT_IN_ISNORMAL:
5857 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5862 CASE_FLT_FN (BUILT_IN_LCEIL):
5863 CASE_FLT_FN (BUILT_IN_LLCEIL):
5864 CASE_FLT_FN (BUILT_IN_LFLOOR):
5865 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5866 target = expand_builtin_int_roundingfn (exp, target);
5871 CASE_FLT_FN (BUILT_IN_LRINT):
5872 CASE_FLT_FN (BUILT_IN_LLRINT):
5873 CASE_FLT_FN (BUILT_IN_LROUND):
5874 CASE_FLT_FN (BUILT_IN_LLROUND):
5875 target = expand_builtin_int_roundingfn_2 (exp, target);
5880 CASE_FLT_FN (BUILT_IN_POW):
5881 target = expand_builtin_pow (exp, target, subtarget);
5886 CASE_FLT_FN (BUILT_IN_POWI):
5887 target = expand_builtin_powi (exp, target, subtarget);
5892 CASE_FLT_FN (BUILT_IN_ATAN2):
5893 CASE_FLT_FN (BUILT_IN_LDEXP):
5894 CASE_FLT_FN (BUILT_IN_SCALB):
5895 CASE_FLT_FN (BUILT_IN_SCALBN):
5896 CASE_FLT_FN (BUILT_IN_SCALBLN):
5897 if (! flag_unsafe_math_optimizations)
5900 CASE_FLT_FN (BUILT_IN_FMOD):
5901 CASE_FLT_FN (BUILT_IN_REMAINDER):
5902 CASE_FLT_FN (BUILT_IN_DREM):
5903 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5908 CASE_FLT_FN (BUILT_IN_CEXPI):
5909 target = expand_builtin_cexpi (exp, target, subtarget);
5910 gcc_assert (target);
5913 CASE_FLT_FN (BUILT_IN_SIN):
5914 CASE_FLT_FN (BUILT_IN_COS):
5915 if (! flag_unsafe_math_optimizations)
5917 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5922 CASE_FLT_FN (BUILT_IN_SINCOS):
5923 if (! flag_unsafe_math_optimizations)
5925 target = expand_builtin_sincos (exp);
5930 case BUILT_IN_APPLY_ARGS:
5931 return expand_builtin_apply_args ();
5933 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5934 FUNCTION with a copy of the parameters described by
5935 ARGUMENTS, and ARGSIZE. It returns a block of memory
5936 allocated on the stack into which is stored all the registers
5937 that might possibly be used for returning the result of a
5938 function. ARGUMENTS is the value returned by
5939 __builtin_apply_args. ARGSIZE is the number of bytes of
5940 arguments that must be copied. ??? How should this value be
5941 computed? We'll also need a safe worst case value for varargs
5943 case BUILT_IN_APPLY:
5944 if (!validate_arglist (exp, POINTER_TYPE,
5945 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5946 && !validate_arglist (exp, REFERENCE_TYPE,
5947 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5953 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5954 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5955 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5957 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5960 /* __builtin_return (RESULT) causes the function to return the
5961 value described by RESULT. RESULT is address of the block of
5962 memory returned by __builtin_apply. */
5963 case BUILT_IN_RETURN:
5964 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5965 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5968 case BUILT_IN_SAVEREGS:
5969 return expand_builtin_saveregs ();
5971 case BUILT_IN_VA_ARG_PACK:
5972 /* All valid uses of __builtin_va_arg_pack () are removed during
5974 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5977 case BUILT_IN_VA_ARG_PACK_LEN:
5978 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5980 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5983 /* Return the address of the first anonymous stack arg. */
5984 case BUILT_IN_NEXT_ARG:
5985 if (fold_builtin_next_arg (exp, false))
5987 return expand_builtin_next_arg ();
5989 case BUILT_IN_CLEAR_CACHE:
5990 target = expand_builtin___clear_cache (exp);
5995 case BUILT_IN_CLASSIFY_TYPE:
5996 return expand_builtin_classify_type (exp);
5998 case BUILT_IN_CONSTANT_P:
6001 case BUILT_IN_FRAME_ADDRESS:
6002 case BUILT_IN_RETURN_ADDRESS:
6003 return expand_builtin_frame_address (fndecl, exp);
6005 /* Returns the address of the area where the structure is returned.
6007 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6008 if (call_expr_nargs (exp) != 0
6009 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6010 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6013 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6015 case BUILT_IN_ALLOCA:
6016 /* If the allocation stems from the declaration of a variable-sized
6017 object, it cannot accumulate. */
6018 target = expand_builtin_alloca (exp, target, ALLOCA_FOR_VAR_P (exp));
6023 case BUILT_IN_STACK_SAVE:
6024 return expand_stack_save ();
6026 case BUILT_IN_STACK_RESTORE:
6027 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6030 case BUILT_IN_BSWAP32:
6031 case BUILT_IN_BSWAP64:
6032 target = expand_builtin_bswap (exp, target, subtarget);
6038 CASE_INT_FN (BUILT_IN_FFS):
6039 case BUILT_IN_FFSIMAX:
6040 target = expand_builtin_unop (target_mode, exp, target,
6041 subtarget, ffs_optab);
6046 CASE_INT_FN (BUILT_IN_CLZ):
6047 case BUILT_IN_CLZIMAX:
6048 target = expand_builtin_unop (target_mode, exp, target,
6049 subtarget, clz_optab);
6054 CASE_INT_FN (BUILT_IN_CTZ):
6055 case BUILT_IN_CTZIMAX:
6056 target = expand_builtin_unop (target_mode, exp, target,
6057 subtarget, ctz_optab);
6062 CASE_INT_FN (BUILT_IN_POPCOUNT):
6063 case BUILT_IN_POPCOUNTIMAX:
6064 target = expand_builtin_unop (target_mode, exp, target,
6065 subtarget, popcount_optab);
6070 CASE_INT_FN (BUILT_IN_PARITY):
6071 case BUILT_IN_PARITYIMAX:
6072 target = expand_builtin_unop (target_mode, exp, target,
6073 subtarget, parity_optab);
6078 case BUILT_IN_STRLEN:
6079 target = expand_builtin_strlen (exp, target, target_mode);
6084 case BUILT_IN_STRCPY:
6085 target = expand_builtin_strcpy (exp, target);
6090 case BUILT_IN_STRNCPY:
6091 target = expand_builtin_strncpy (exp, target);
6096 case BUILT_IN_STPCPY:
6097 target = expand_builtin_stpcpy (exp, target, mode);
6102 case BUILT_IN_MEMCPY:
6103 target = expand_builtin_memcpy (exp, target);
6108 case BUILT_IN_MEMPCPY:
6109 target = expand_builtin_mempcpy (exp, target, mode);
6114 case BUILT_IN_MEMSET:
6115 target = expand_builtin_memset (exp, target, mode);
6120 case BUILT_IN_BZERO:
6121 target = expand_builtin_bzero (exp);
6126 case BUILT_IN_STRCMP:
6127 target = expand_builtin_strcmp (exp, target);
6132 case BUILT_IN_STRNCMP:
6133 target = expand_builtin_strncmp (exp, target, mode);
6139 case BUILT_IN_MEMCMP:
6140 target = expand_builtin_memcmp (exp, target, mode);
6145 case BUILT_IN_SETJMP:
6146 /* This should have been lowered to the builtins below. */
6149 case BUILT_IN_SETJMP_SETUP:
6150 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6151 and the receiver label. */
6152 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6154 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6155 VOIDmode, EXPAND_NORMAL);
6156 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6157 rtx label_r = label_rtx (label);
6159 /* This is copied from the handling of non-local gotos. */
6160 expand_builtin_setjmp_setup (buf_addr, label_r);
6161 nonlocal_goto_handler_labels
6162 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6163 nonlocal_goto_handler_labels);
6164 /* ??? Do not let expand_label treat us as such since we would
6165 not want to be both on the list of non-local labels and on
6166 the list of forced labels. */
6167 FORCED_LABEL (label) = 0;
6172 case BUILT_IN_SETJMP_DISPATCHER:
6173 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6174 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6176 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6177 rtx label_r = label_rtx (label);
6179 /* Remove the dispatcher label from the list of non-local labels
6180 since the receiver labels have been added to it above. */
6181 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6186 case BUILT_IN_SETJMP_RECEIVER:
6187 /* __builtin_setjmp_receiver is passed the receiver label. */
6188 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6190 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6191 rtx label_r = label_rtx (label);
6193 expand_builtin_setjmp_receiver (label_r);
6198 /* __builtin_longjmp is passed a pointer to an array of five words.
6199 It's similar to the C library longjmp function but works with
6200 __builtin_setjmp above. */
6201 case BUILT_IN_LONGJMP:
6202 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6204 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6205 VOIDmode, EXPAND_NORMAL);
6206 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6208 if (value != const1_rtx)
6210 error ("%<__builtin_longjmp%> second argument must be 1");
6214 expand_builtin_longjmp (buf_addr, value);
6219 case BUILT_IN_NONLOCAL_GOTO:
6220 target = expand_builtin_nonlocal_goto (exp);
6225 /* This updates the setjmp buffer that is its argument with the value
6226 of the current stack pointer. */
6227 case BUILT_IN_UPDATE_SETJMP_BUF:
6228 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6231 = expand_normal (CALL_EXPR_ARG (exp, 0));
6233 expand_builtin_update_setjmp_buf (buf_addr);
6239 expand_builtin_trap ();
6242 case BUILT_IN_UNREACHABLE:
6243 expand_builtin_unreachable ();
6246 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6247 case BUILT_IN_SIGNBITD32:
6248 case BUILT_IN_SIGNBITD64:
6249 case BUILT_IN_SIGNBITD128:
6250 target = expand_builtin_signbit (exp, target);
6255 /* Various hooks for the DWARF 2 __throw routine. */
6256 case BUILT_IN_UNWIND_INIT:
6257 expand_builtin_unwind_init ();
6259 case BUILT_IN_DWARF_CFA:
6260 return virtual_cfa_rtx;
6261 #ifdef DWARF2_UNWIND_INFO
6262 case BUILT_IN_DWARF_SP_COLUMN:
6263 return expand_builtin_dwarf_sp_column ();
6264 case BUILT_IN_INIT_DWARF_REG_SIZES:
6265 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6268 case BUILT_IN_FROB_RETURN_ADDR:
6269 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6270 case BUILT_IN_EXTRACT_RETURN_ADDR:
6271 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6272 case BUILT_IN_EH_RETURN:
6273 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6274 CALL_EXPR_ARG (exp, 1));
6276 #ifdef EH_RETURN_DATA_REGNO
6277 case BUILT_IN_EH_RETURN_DATA_REGNO:
6278 return expand_builtin_eh_return_data_regno (exp);
6280 case BUILT_IN_EXTEND_POINTER:
6281 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6282 case BUILT_IN_EH_POINTER:
6283 return expand_builtin_eh_pointer (exp);
6284 case BUILT_IN_EH_FILTER:
6285 return expand_builtin_eh_filter (exp);
6286 case BUILT_IN_EH_COPY_VALUES:
6287 return expand_builtin_eh_copy_values (exp);
6289 case BUILT_IN_VA_START:
6290 return expand_builtin_va_start (exp);
6291 case BUILT_IN_VA_END:
6292 return expand_builtin_va_end (exp);
6293 case BUILT_IN_VA_COPY:
6294 return expand_builtin_va_copy (exp);
6295 case BUILT_IN_EXPECT:
6296 return expand_builtin_expect (exp, target);
6297 case BUILT_IN_PREFETCH:
6298 expand_builtin_prefetch (exp);
6301 case BUILT_IN_PROFILE_FUNC_ENTER:
6302 return expand_builtin_profile_func (false);
6303 case BUILT_IN_PROFILE_FUNC_EXIT:
6304 return expand_builtin_profile_func (true);
6306 case BUILT_IN_INIT_TRAMPOLINE:
6307 return expand_builtin_init_trampoline (exp);
6308 case BUILT_IN_ADJUST_TRAMPOLINE:
6309 return expand_builtin_adjust_trampoline (exp);
6312 case BUILT_IN_EXECL:
6313 case BUILT_IN_EXECV:
6314 case BUILT_IN_EXECLP:
6315 case BUILT_IN_EXECLE:
6316 case BUILT_IN_EXECVP:
6317 case BUILT_IN_EXECVE:
6318 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6323 case BUILT_IN_FETCH_AND_ADD_1:
6324 case BUILT_IN_FETCH_AND_ADD_2:
6325 case BUILT_IN_FETCH_AND_ADD_4:
6326 case BUILT_IN_FETCH_AND_ADD_8:
6327 case BUILT_IN_FETCH_AND_ADD_16:
6328 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6329 target = expand_builtin_sync_operation (mode, exp, PLUS,
6330 false, target, ignore);
6335 case BUILT_IN_FETCH_AND_SUB_1:
6336 case BUILT_IN_FETCH_AND_SUB_2:
6337 case BUILT_IN_FETCH_AND_SUB_4:
6338 case BUILT_IN_FETCH_AND_SUB_8:
6339 case BUILT_IN_FETCH_AND_SUB_16:
6340 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6341 target = expand_builtin_sync_operation (mode, exp, MINUS,
6342 false, target, ignore);
6347 case BUILT_IN_FETCH_AND_OR_1:
6348 case BUILT_IN_FETCH_AND_OR_2:
6349 case BUILT_IN_FETCH_AND_OR_4:
6350 case BUILT_IN_FETCH_AND_OR_8:
6351 case BUILT_IN_FETCH_AND_OR_16:
6352 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6353 target = expand_builtin_sync_operation (mode, exp, IOR,
6354 false, target, ignore);
6359 case BUILT_IN_FETCH_AND_AND_1:
6360 case BUILT_IN_FETCH_AND_AND_2:
6361 case BUILT_IN_FETCH_AND_AND_4:
6362 case BUILT_IN_FETCH_AND_AND_8:
6363 case BUILT_IN_FETCH_AND_AND_16:
6364 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6365 target = expand_builtin_sync_operation (mode, exp, AND,
6366 false, target, ignore);
6371 case BUILT_IN_FETCH_AND_XOR_1:
6372 case BUILT_IN_FETCH_AND_XOR_2:
6373 case BUILT_IN_FETCH_AND_XOR_4:
6374 case BUILT_IN_FETCH_AND_XOR_8:
6375 case BUILT_IN_FETCH_AND_XOR_16:
6376 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6377 target = expand_builtin_sync_operation (mode, exp, XOR,
6378 false, target, ignore);
6383 case BUILT_IN_FETCH_AND_NAND_1:
6384 case BUILT_IN_FETCH_AND_NAND_2:
6385 case BUILT_IN_FETCH_AND_NAND_4:
6386 case BUILT_IN_FETCH_AND_NAND_8:
6387 case BUILT_IN_FETCH_AND_NAND_16:
6388 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6389 target = expand_builtin_sync_operation (mode, exp, NOT,
6390 false, target, ignore);
6395 case BUILT_IN_ADD_AND_FETCH_1:
6396 case BUILT_IN_ADD_AND_FETCH_2:
6397 case BUILT_IN_ADD_AND_FETCH_4:
6398 case BUILT_IN_ADD_AND_FETCH_8:
6399 case BUILT_IN_ADD_AND_FETCH_16:
6400 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6401 target = expand_builtin_sync_operation (mode, exp, PLUS,
6402 true, target, ignore);
6407 case BUILT_IN_SUB_AND_FETCH_1:
6408 case BUILT_IN_SUB_AND_FETCH_2:
6409 case BUILT_IN_SUB_AND_FETCH_4:
6410 case BUILT_IN_SUB_AND_FETCH_8:
6411 case BUILT_IN_SUB_AND_FETCH_16:
6412 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6413 target = expand_builtin_sync_operation (mode, exp, MINUS,
6414 true, target, ignore);
6419 case BUILT_IN_OR_AND_FETCH_1:
6420 case BUILT_IN_OR_AND_FETCH_2:
6421 case BUILT_IN_OR_AND_FETCH_4:
6422 case BUILT_IN_OR_AND_FETCH_8:
6423 case BUILT_IN_OR_AND_FETCH_16:
6424 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6425 target = expand_builtin_sync_operation (mode, exp, IOR,
6426 true, target, ignore);
6431 case BUILT_IN_AND_AND_FETCH_1:
6432 case BUILT_IN_AND_AND_FETCH_2:
6433 case BUILT_IN_AND_AND_FETCH_4:
6434 case BUILT_IN_AND_AND_FETCH_8:
6435 case BUILT_IN_AND_AND_FETCH_16:
6436 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6437 target = expand_builtin_sync_operation (mode, exp, AND,
6438 true, target, ignore);
6443 case BUILT_IN_XOR_AND_FETCH_1:
6444 case BUILT_IN_XOR_AND_FETCH_2:
6445 case BUILT_IN_XOR_AND_FETCH_4:
6446 case BUILT_IN_XOR_AND_FETCH_8:
6447 case BUILT_IN_XOR_AND_FETCH_16:
6448 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6449 target = expand_builtin_sync_operation (mode, exp, XOR,
6450 true, target, ignore);
6455 case BUILT_IN_NAND_AND_FETCH_1:
6456 case BUILT_IN_NAND_AND_FETCH_2:
6457 case BUILT_IN_NAND_AND_FETCH_4:
6458 case BUILT_IN_NAND_AND_FETCH_8:
6459 case BUILT_IN_NAND_AND_FETCH_16:
6460 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6461 target = expand_builtin_sync_operation (mode, exp, NOT,
6462 true, target, ignore);
6467 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6468 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6469 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6470 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6471 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6472 if (mode == VOIDmode)
6473 mode = TYPE_MODE (boolean_type_node);
6474 if (!target || !register_operand (target, mode))
6475 target = gen_reg_rtx (mode);
6477 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6478 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6483 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6484 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6485 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6486 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6487 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6488 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6489 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6494 case BUILT_IN_LOCK_TEST_AND_SET_1:
6495 case BUILT_IN_LOCK_TEST_AND_SET_2:
6496 case BUILT_IN_LOCK_TEST_AND_SET_4:
6497 case BUILT_IN_LOCK_TEST_AND_SET_8:
6498 case BUILT_IN_LOCK_TEST_AND_SET_16:
6499 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6500 target = expand_builtin_lock_test_and_set (mode, exp, target);
6505 case BUILT_IN_LOCK_RELEASE_1:
6506 case BUILT_IN_LOCK_RELEASE_2:
6507 case BUILT_IN_LOCK_RELEASE_4:
6508 case BUILT_IN_LOCK_RELEASE_8:
6509 case BUILT_IN_LOCK_RELEASE_16:
6510 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6511 expand_builtin_lock_release (mode, exp);
6514 case BUILT_IN_SYNCHRONIZE:
6515 expand_builtin_synchronize ();
6518 case BUILT_IN_OBJECT_SIZE:
6519 return expand_builtin_object_size (exp);
6521 case BUILT_IN_MEMCPY_CHK:
6522 case BUILT_IN_MEMPCPY_CHK:
6523 case BUILT_IN_MEMMOVE_CHK:
6524 case BUILT_IN_MEMSET_CHK:
6525 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6530 case BUILT_IN_STRCPY_CHK:
6531 case BUILT_IN_STPCPY_CHK:
6532 case BUILT_IN_STRNCPY_CHK:
6533 case BUILT_IN_STRCAT_CHK:
6534 case BUILT_IN_STRNCAT_CHK:
6535 case BUILT_IN_SNPRINTF_CHK:
6536 case BUILT_IN_VSNPRINTF_CHK:
6537 maybe_emit_chk_warning (exp, fcode);
6540 case BUILT_IN_SPRINTF_CHK:
6541 case BUILT_IN_VSPRINTF_CHK:
6542 maybe_emit_sprintf_chk_warning (exp, fcode);
6546 maybe_emit_free_warning (exp);
6549 default: /* just do library call, if unknown builtin */
6553 /* The switch statement above can drop through to cause the function
6554 to be called normally. */
6555 return expand_call (exp, target, ignore);
6558 /* Determine whether a tree node represents a call to a built-in
6559 function. If the tree T is a call to a built-in function with
6560 the right number of arguments of the appropriate types, return
6561 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6562 Otherwise the return value is END_BUILTINS. */
6564 enum built_in_function
6565 builtin_mathfn_code (const_tree t)
6567 const_tree fndecl, arg, parmlist;
6568 const_tree argtype, parmtype;
6569 const_call_expr_arg_iterator iter;
6571 if (TREE_CODE (t) != CALL_EXPR
6572 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6573 return END_BUILTINS;
6575 fndecl = get_callee_fndecl (t);
6576 if (fndecl == NULL_TREE
6577 || TREE_CODE (fndecl) != FUNCTION_DECL
6578 || ! DECL_BUILT_IN (fndecl)
6579 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6580 return END_BUILTINS;
6582 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6583 init_const_call_expr_arg_iterator (t, &iter);
6584 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6586 /* If a function doesn't take a variable number of arguments,
6587 the last element in the list will have type `void'. */
6588 parmtype = TREE_VALUE (parmlist);
6589 if (VOID_TYPE_P (parmtype))
6591 if (more_const_call_expr_args_p (&iter))
6592 return END_BUILTINS;
6593 return DECL_FUNCTION_CODE (fndecl);
6596 if (! more_const_call_expr_args_p (&iter))
6597 return END_BUILTINS;
6599 arg = next_const_call_expr_arg (&iter);
6600 argtype = TREE_TYPE (arg);
6602 if (SCALAR_FLOAT_TYPE_P (parmtype))
6604 if (! SCALAR_FLOAT_TYPE_P (argtype))
6605 return END_BUILTINS;
6607 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6609 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6610 return END_BUILTINS;
6612 else if (POINTER_TYPE_P (parmtype))
6614 if (! POINTER_TYPE_P (argtype))
6615 return END_BUILTINS;
6617 else if (INTEGRAL_TYPE_P (parmtype))
6619 if (! INTEGRAL_TYPE_P (argtype))
6620 return END_BUILTINS;
6623 return END_BUILTINS;
6626 /* Variable-length argument list. */
6627 return DECL_FUNCTION_CODE (fndecl);
6630 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6631 evaluate to a constant. */
6634 fold_builtin_constant_p (tree arg)
6636 /* We return 1 for a numeric type that's known to be a constant
6637 value at compile-time or for an aggregate type that's a
6638 literal constant. */
6641 /* If we know this is a constant, emit the constant of one. */
6642 if (CONSTANT_CLASS_P (arg)
6643 || (TREE_CODE (arg) == CONSTRUCTOR
6644 && TREE_CONSTANT (arg)))
6645 return integer_one_node;
6646 if (TREE_CODE (arg) == ADDR_EXPR)
6648 tree op = TREE_OPERAND (arg, 0);
6649 if (TREE_CODE (op) == STRING_CST
6650 || (TREE_CODE (op) == ARRAY_REF
6651 && integer_zerop (TREE_OPERAND (op, 1))
6652 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6653 return integer_one_node;
6656 /* If this expression has side effects, show we don't know it to be a
6657 constant. Likewise if it's a pointer or aggregate type since in
6658 those case we only want literals, since those are only optimized
6659 when generating RTL, not later.
6660 And finally, if we are compiling an initializer, not code, we
6661 need to return a definite result now; there's not going to be any
6662 more optimization done. */
6663 if (TREE_SIDE_EFFECTS (arg)
6664 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6665 || POINTER_TYPE_P (TREE_TYPE (arg))
6667 || folding_initializer)
6668 return integer_zero_node;
6673 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6674 return it as a truthvalue. */
6677 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6679 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6681 fn = built_in_decls[BUILT_IN_EXPECT];
6682 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6683 ret_type = TREE_TYPE (TREE_TYPE (fn));
6684 pred_type = TREE_VALUE (arg_types);
6685 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6687 pred = fold_convert_loc (loc, pred_type, pred);
6688 expected = fold_convert_loc (loc, expected_type, expected);
6689 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6691 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6692 build_int_cst (ret_type, 0));
6695 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6696 NULL_TREE if no simplification is possible. */
6699 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6702 enum tree_code code;
6704 /* If this is a builtin_expect within a builtin_expect keep the
6705 inner one. See through a comparison against a constant. It
6706 might have been added to create a thruthvalue. */
6708 if (COMPARISON_CLASS_P (inner)
6709 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6710 inner = TREE_OPERAND (inner, 0);
6712 if (TREE_CODE (inner) == CALL_EXPR
6713 && (fndecl = get_callee_fndecl (inner))
6714 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6715 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6718 /* Distribute the expected value over short-circuiting operators.
6719 See through the cast from truthvalue_type_node to long. */
6721 while (TREE_CODE (inner) == NOP_EXPR
6722 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6723 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6724 inner = TREE_OPERAND (inner, 0);
6726 code = TREE_CODE (inner);
6727 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6729 tree op0 = TREE_OPERAND (inner, 0);
6730 tree op1 = TREE_OPERAND (inner, 1);
6732 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6733 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6734 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6736 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6739 /* If the argument isn't invariant then there's nothing else we can do. */
6740 if (!TREE_CONSTANT (arg0))
6743 /* If we expect that a comparison against the argument will fold to
6744 a constant return the constant. In practice, this means a true
6745 constant or the address of a non-weak symbol. */
6748 if (TREE_CODE (inner) == ADDR_EXPR)
6752 inner = TREE_OPERAND (inner, 0);
6754 while (TREE_CODE (inner) == COMPONENT_REF
6755 || TREE_CODE (inner) == ARRAY_REF);
6756 if ((TREE_CODE (inner) == VAR_DECL
6757 || TREE_CODE (inner) == FUNCTION_DECL)
6758 && DECL_WEAK (inner))
6762 /* Otherwise, ARG0 already has the proper type for the return value. */
6766 /* Fold a call to __builtin_classify_type with argument ARG. */
6769 fold_builtin_classify_type (tree arg)
6772 return build_int_cst (NULL_TREE, no_type_class);
6774 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6777 /* Fold a call to __builtin_strlen with argument ARG. */
6780 fold_builtin_strlen (location_t loc, tree type, tree arg)
6782 if (!validate_arg (arg, POINTER_TYPE))
6786 tree len = c_strlen (arg, 0);
6789 return fold_convert_loc (loc, type, len);
6795 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6798 fold_builtin_inf (location_t loc, tree type, int warn)
6800 REAL_VALUE_TYPE real;
6802 /* __builtin_inff is intended to be usable to define INFINITY on all
6803 targets. If an infinity is not available, INFINITY expands "to a
6804 positive constant of type float that overflows at translation
6805 time", footnote "In this case, using INFINITY will violate the
6806 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6807 Thus we pedwarn to ensure this constraint violation is
6809 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6810 pedwarn (loc, 0, "target format does not support infinity");
6813 return build_real (type, real);
6816 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6819 fold_builtin_nan (tree arg, tree type, int quiet)
6821 REAL_VALUE_TYPE real;
6824 if (!validate_arg (arg, POINTER_TYPE))
6826 str = c_getstr (arg);
6830 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6833 return build_real (type, real);
6836 /* Return true if the floating point expression T has an integer value.
6837 We also allow +Inf, -Inf and NaN to be considered integer values. */
6840 integer_valued_real_p (tree t)
6842 switch (TREE_CODE (t))
6849 return integer_valued_real_p (TREE_OPERAND (t, 0));
6854 return integer_valued_real_p (TREE_OPERAND (t, 1));
6861 return integer_valued_real_p (TREE_OPERAND (t, 0))
6862 && integer_valued_real_p (TREE_OPERAND (t, 1));
6865 return integer_valued_real_p (TREE_OPERAND (t, 1))
6866 && integer_valued_real_p (TREE_OPERAND (t, 2));
6869 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6873 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6874 if (TREE_CODE (type) == INTEGER_TYPE)
6876 if (TREE_CODE (type) == REAL_TYPE)
6877 return integer_valued_real_p (TREE_OPERAND (t, 0));
6882 switch (builtin_mathfn_code (t))
6884 CASE_FLT_FN (BUILT_IN_CEIL):
6885 CASE_FLT_FN (BUILT_IN_FLOOR):
6886 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6887 CASE_FLT_FN (BUILT_IN_RINT):
6888 CASE_FLT_FN (BUILT_IN_ROUND):
6889 CASE_FLT_FN (BUILT_IN_TRUNC):
6892 CASE_FLT_FN (BUILT_IN_FMIN):
6893 CASE_FLT_FN (BUILT_IN_FMAX):
6894 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6895 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6908 /* FNDECL is assumed to be a builtin where truncation can be propagated
6909 across (for instance floor((double)f) == (double)floorf (f).
6910 Do the transformation for a call with argument ARG. */
6913 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6915 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6917 if (!validate_arg (arg, REAL_TYPE))
6920 /* Integer rounding functions are idempotent. */
6921 if (fcode == builtin_mathfn_code (arg))
6924 /* If argument is already integer valued, and we don't need to worry
6925 about setting errno, there's no need to perform rounding. */
6926 if (! flag_errno_math && integer_valued_real_p (arg))
6931 tree arg0 = strip_float_extensions (arg);
6932 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6933 tree newtype = TREE_TYPE (arg0);
6936 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6937 && (decl = mathfn_built_in (newtype, fcode)))
6938 return fold_convert_loc (loc, ftype,
6939 build_call_expr_loc (loc, decl, 1,
6940 fold_convert_loc (loc,
6947 /* FNDECL is assumed to be builtin which can narrow the FP type of
6948 the argument, for instance lround((double)f) -> lroundf (f).
6949 Do the transformation for a call with argument ARG. */
6952 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6954 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6956 if (!validate_arg (arg, REAL_TYPE))
6959 /* If argument is already integer valued, and we don't need to worry
6960 about setting errno, there's no need to perform rounding. */
6961 if (! flag_errno_math && integer_valued_real_p (arg))
6962 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6963 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6967 tree ftype = TREE_TYPE (arg);
6968 tree arg0 = strip_float_extensions (arg);
6969 tree newtype = TREE_TYPE (arg0);
6972 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6973 && (decl = mathfn_built_in (newtype, fcode)))
6974 return build_call_expr_loc (loc, decl, 1,
6975 fold_convert_loc (loc, newtype, arg0));
6978 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6979 sizeof (long long) == sizeof (long). */
6980 if (TYPE_PRECISION (long_long_integer_type_node)
6981 == TYPE_PRECISION (long_integer_type_node))
6983 tree newfn = NULL_TREE;
6986 CASE_FLT_FN (BUILT_IN_LLCEIL):
6987 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6990 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6991 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6994 CASE_FLT_FN (BUILT_IN_LLROUND):
6995 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6998 CASE_FLT_FN (BUILT_IN_LLRINT):
6999 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7008 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7009 return fold_convert_loc (loc,
7010 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7017 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7018 return type. Return NULL_TREE if no simplification can be made. */
7021 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7025 if (!validate_arg (arg, COMPLEX_TYPE)
7026 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7029 /* Calculate the result when the argument is a constant. */
7030 if (TREE_CODE (arg) == COMPLEX_CST
7031 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7035 if (TREE_CODE (arg) == COMPLEX_EXPR)
7037 tree real = TREE_OPERAND (arg, 0);
7038 tree imag = TREE_OPERAND (arg, 1);
7040 /* If either part is zero, cabs is fabs of the other. */
7041 if (real_zerop (real))
7042 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7043 if (real_zerop (imag))
7044 return fold_build1_loc (loc, ABS_EXPR, type, real);
7046 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7047 if (flag_unsafe_math_optimizations
7048 && operand_equal_p (real, imag, OEP_PURE_SAME))
7050 const REAL_VALUE_TYPE sqrt2_trunc
7051 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7053 return fold_build2_loc (loc, MULT_EXPR, type,
7054 fold_build1_loc (loc, ABS_EXPR, type, real),
7055 build_real (type, sqrt2_trunc));
7059 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7060 if (TREE_CODE (arg) == NEGATE_EXPR
7061 || TREE_CODE (arg) == CONJ_EXPR)
7062 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7064 /* Don't do this when optimizing for size. */
7065 if (flag_unsafe_math_optimizations
7066 && optimize && optimize_function_for_speed_p (cfun))
7068 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7070 if (sqrtfn != NULL_TREE)
7072 tree rpart, ipart, result;
7074 arg = builtin_save_expr (arg);
7076 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7077 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7079 rpart = builtin_save_expr (rpart);
7080 ipart = builtin_save_expr (ipart);
7082 result = fold_build2_loc (loc, PLUS_EXPR, type,
7083 fold_build2_loc (loc, MULT_EXPR, type,
7085 fold_build2_loc (loc, MULT_EXPR, type,
7088 return build_call_expr_loc (loc, sqrtfn, 1, result);
7095 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7096 complex tree type of the result. If NEG is true, the imaginary
7097 zero is negative. */
7100 build_complex_cproj (tree type, bool neg)
7102 REAL_VALUE_TYPE rinf, rzero = dconst0;
7106 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7107 build_real (TREE_TYPE (type), rzero));
7110 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7111 return type. Return NULL_TREE if no simplification can be made. */
7114 fold_builtin_cproj (location_t loc, tree arg, tree type)
7116 if (!validate_arg (arg, COMPLEX_TYPE)
7117 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7120 /* If there are no infinities, return arg. */
7121 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7122 return non_lvalue_loc (loc, arg);
7124 /* Calculate the result when the argument is a constant. */
7125 if (TREE_CODE (arg) == COMPLEX_CST)
7127 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7128 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7130 if (real_isinf (real) || real_isinf (imag))
7131 return build_complex_cproj (type, imag->sign);
7135 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7137 tree real = TREE_OPERAND (arg, 0);
7138 tree imag = TREE_OPERAND (arg, 1);
7143 /* If the real part is inf and the imag part is known to be
7144 nonnegative, return (inf + 0i). Remember side-effects are
7145 possible in the imag part. */
7146 if (TREE_CODE (real) == REAL_CST
7147 && real_isinf (TREE_REAL_CST_PTR (real))
7148 && tree_expr_nonnegative_p (imag))
7149 return omit_one_operand_loc (loc, type,
7150 build_complex_cproj (type, false),
7153 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7154 Remember side-effects are possible in the real part. */
7155 if (TREE_CODE (imag) == REAL_CST
7156 && real_isinf (TREE_REAL_CST_PTR (imag)))
7158 omit_one_operand_loc (loc, type,
7159 build_complex_cproj (type, TREE_REAL_CST_PTR
7160 (imag)->sign), arg);
7166 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7167 Return NULL_TREE if no simplification can be made. */
7170 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7173 enum built_in_function fcode;
7176 if (!validate_arg (arg, REAL_TYPE))
7179 /* Calculate the result when the argument is a constant. */
7180 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7183 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7184 fcode = builtin_mathfn_code (arg);
7185 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7187 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7188 arg = fold_build2_loc (loc, MULT_EXPR, type,
7189 CALL_EXPR_ARG (arg, 0),
7190 build_real (type, dconsthalf));
7191 return build_call_expr_loc (loc, expfn, 1, arg);
7194 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7195 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7197 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7201 tree arg0 = CALL_EXPR_ARG (arg, 0);
7203 /* The inner root was either sqrt or cbrt. */
7204 /* This was a conditional expression but it triggered a bug
7206 REAL_VALUE_TYPE dconstroot;
7207 if (BUILTIN_SQRT_P (fcode))
7208 dconstroot = dconsthalf;
7210 dconstroot = dconst_third ();
7212 /* Adjust for the outer root. */
7213 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7214 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7215 tree_root = build_real (type, dconstroot);
7216 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7220 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7221 if (flag_unsafe_math_optimizations
7222 && (fcode == BUILT_IN_POW
7223 || fcode == BUILT_IN_POWF
7224 || fcode == BUILT_IN_POWL))
7226 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7227 tree arg0 = CALL_EXPR_ARG (arg, 0);
7228 tree arg1 = CALL_EXPR_ARG (arg, 1);
7230 if (!tree_expr_nonnegative_p (arg0))
7231 arg0 = build1 (ABS_EXPR, type, arg0);
7232 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7233 build_real (type, dconsthalf));
7234 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7240 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7241 Return NULL_TREE if no simplification can be made. */
7244 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7246 const enum built_in_function fcode = builtin_mathfn_code (arg);
7249 if (!validate_arg (arg, REAL_TYPE))
7252 /* Calculate the result when the argument is a constant. */
7253 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7256 if (flag_unsafe_math_optimizations)
7258 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7259 if (BUILTIN_EXPONENT_P (fcode))
7261 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7262 const REAL_VALUE_TYPE third_trunc =
7263 real_value_truncate (TYPE_MODE (type), dconst_third ());
7264 arg = fold_build2_loc (loc, MULT_EXPR, type,
7265 CALL_EXPR_ARG (arg, 0),
7266 build_real (type, third_trunc));
7267 return build_call_expr_loc (loc, expfn, 1, arg);
7270 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7271 if (BUILTIN_SQRT_P (fcode))
7273 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7277 tree arg0 = CALL_EXPR_ARG (arg, 0);
7279 REAL_VALUE_TYPE dconstroot = dconst_third ();
7281 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7282 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7283 tree_root = build_real (type, dconstroot);
7284 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7288 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7289 if (BUILTIN_CBRT_P (fcode))
7291 tree arg0 = CALL_EXPR_ARG (arg, 0);
7292 if (tree_expr_nonnegative_p (arg0))
7294 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7299 REAL_VALUE_TYPE dconstroot;
7301 real_arithmetic (&dconstroot, MULT_EXPR,
7302 dconst_third_ptr (), dconst_third_ptr ());
7303 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7304 tree_root = build_real (type, dconstroot);
7305 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7310 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7311 if (fcode == BUILT_IN_POW
7312 || fcode == BUILT_IN_POWF
7313 || fcode == BUILT_IN_POWL)
7315 tree arg00 = CALL_EXPR_ARG (arg, 0);
7316 tree arg01 = CALL_EXPR_ARG (arg, 1);
7317 if (tree_expr_nonnegative_p (arg00))
7319 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7320 const REAL_VALUE_TYPE dconstroot
7321 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7322 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7323 build_real (type, dconstroot));
7324 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7331 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7332 TYPE is the type of the return value. Return NULL_TREE if no
7333 simplification can be made. */
7336 fold_builtin_cos (location_t loc,
7337 tree arg, tree type, tree fndecl)
7341 if (!validate_arg (arg, REAL_TYPE))
7344 /* Calculate the result when the argument is a constant. */
7345 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7348 /* Optimize cos(-x) into cos (x). */
7349 if ((narg = fold_strip_sign_ops (arg)))
7350 return build_call_expr_loc (loc, fndecl, 1, narg);
7355 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7356 Return NULL_TREE if no simplification can be made. */
7359 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7361 if (validate_arg (arg, REAL_TYPE))
7365 /* Calculate the result when the argument is a constant. */
7366 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7369 /* Optimize cosh(-x) into cosh (x). */
7370 if ((narg = fold_strip_sign_ops (arg)))
7371 return build_call_expr_loc (loc, fndecl, 1, narg);
7377 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7378 argument ARG. TYPE is the type of the return value. Return
7379 NULL_TREE if no simplification can be made. */
7382 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7385 if (validate_arg (arg, COMPLEX_TYPE)
7386 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7390 /* Calculate the result when the argument is a constant. */
7391 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7394 /* Optimize fn(-x) into fn(x). */
7395 if ((tmp = fold_strip_sign_ops (arg)))
7396 return build_call_expr_loc (loc, fndecl, 1, tmp);
7402 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7403 Return NULL_TREE if no simplification can be made. */
7406 fold_builtin_tan (tree arg, tree type)
7408 enum built_in_function fcode;
7411 if (!validate_arg (arg, REAL_TYPE))
7414 /* Calculate the result when the argument is a constant. */
7415 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7418 /* Optimize tan(atan(x)) = x. */
7419 fcode = builtin_mathfn_code (arg);
7420 if (flag_unsafe_math_optimizations
7421 && (fcode == BUILT_IN_ATAN
7422 || fcode == BUILT_IN_ATANF
7423 || fcode == BUILT_IN_ATANL))
7424 return CALL_EXPR_ARG (arg, 0);
7429 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7430 NULL_TREE if no simplification can be made. */
7433 fold_builtin_sincos (location_t loc,
7434 tree arg0, tree arg1, tree arg2)
7439 if (!validate_arg (arg0, REAL_TYPE)
7440 || !validate_arg (arg1, POINTER_TYPE)
7441 || !validate_arg (arg2, POINTER_TYPE))
7444 type = TREE_TYPE (arg0);
7446 /* Calculate the result when the argument is a constant. */
7447 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7450 /* Canonicalize sincos to cexpi. */
7451 if (!TARGET_C99_FUNCTIONS)
7453 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7457 call = build_call_expr_loc (loc, fn, 1, arg0);
7458 call = builtin_save_expr (call);
7460 return build2 (COMPOUND_EXPR, void_type_node,
7461 build2 (MODIFY_EXPR, void_type_node,
7462 build_fold_indirect_ref_loc (loc, arg1),
7463 build1 (IMAGPART_EXPR, type, call)),
7464 build2 (MODIFY_EXPR, void_type_node,
7465 build_fold_indirect_ref_loc (loc, arg2),
7466 build1 (REALPART_EXPR, type, call)));
7469 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7470 NULL_TREE if no simplification can be made. */
7473 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7476 tree realp, imagp, ifn;
7479 if (!validate_arg (arg0, COMPLEX_TYPE)
7480 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7483 /* Calculate the result when the argument is a constant. */
7484 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7487 rtype = TREE_TYPE (TREE_TYPE (arg0));
7489 /* In case we can figure out the real part of arg0 and it is constant zero
7491 if (!TARGET_C99_FUNCTIONS)
7493 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7497 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7498 && real_zerop (realp))
7500 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7501 return build_call_expr_loc (loc, ifn, 1, narg);
7504 /* In case we can easily decompose real and imaginary parts split cexp
7505 to exp (r) * cexpi (i). */
7506 if (flag_unsafe_math_optimizations
7509 tree rfn, rcall, icall;
7511 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7515 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7519 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7520 icall = builtin_save_expr (icall);
7521 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7522 rcall = builtin_save_expr (rcall);
7523 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7524 fold_build2_loc (loc, MULT_EXPR, rtype,
7526 fold_build1_loc (loc, REALPART_EXPR,
7528 fold_build2_loc (loc, MULT_EXPR, rtype,
7530 fold_build1_loc (loc, IMAGPART_EXPR,
7537 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7538 Return NULL_TREE if no simplification can be made. */
7541 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7543 if (!validate_arg (arg, REAL_TYPE))
7546 /* Optimize trunc of constant value. */
7547 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7549 REAL_VALUE_TYPE r, x;
7550 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7552 x = TREE_REAL_CST (arg);
7553 real_trunc (&r, TYPE_MODE (type), &x);
7554 return build_real (type, r);
7557 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7560 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7561 Return NULL_TREE if no simplification can be made. */
7564 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7566 if (!validate_arg (arg, REAL_TYPE))
7569 /* Optimize floor of constant value. */
7570 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7574 x = TREE_REAL_CST (arg);
7575 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7577 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7580 real_floor (&r, TYPE_MODE (type), &x);
7581 return build_real (type, r);
7585 /* Fold floor (x) where x is nonnegative to trunc (x). */
7586 if (tree_expr_nonnegative_p (arg))
7588 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7590 return build_call_expr_loc (loc, truncfn, 1, arg);
7593 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7596 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7597 Return NULL_TREE if no simplification can be made. */
7600 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7602 if (!validate_arg (arg, REAL_TYPE))
7605 /* Optimize ceil of constant value. */
7606 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7610 x = TREE_REAL_CST (arg);
7611 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7613 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7616 real_ceil (&r, TYPE_MODE (type), &x);
7617 return build_real (type, r);
7621 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7624 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7625 Return NULL_TREE if no simplification can be made. */
7628 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7630 if (!validate_arg (arg, REAL_TYPE))
7633 /* Optimize round of constant value. */
7634 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7638 x = TREE_REAL_CST (arg);
7639 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7641 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7644 real_round (&r, TYPE_MODE (type), &x);
7645 return build_real (type, r);
7649 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7652 /* Fold function call to builtin lround, lroundf or lroundl (or the
7653 corresponding long long versions) and other rounding functions. ARG
7654 is the argument to the call. Return NULL_TREE if no simplification
7658 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7660 if (!validate_arg (arg, REAL_TYPE))
7663 /* Optimize lround of constant value. */
7664 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7666 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7668 if (real_isfinite (&x))
7670 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7671 tree ftype = TREE_TYPE (arg);
7675 switch (DECL_FUNCTION_CODE (fndecl))
7677 CASE_FLT_FN (BUILT_IN_LFLOOR):
7678 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7679 real_floor (&r, TYPE_MODE (ftype), &x);
7682 CASE_FLT_FN (BUILT_IN_LCEIL):
7683 CASE_FLT_FN (BUILT_IN_LLCEIL):
7684 real_ceil (&r, TYPE_MODE (ftype), &x);
7687 CASE_FLT_FN (BUILT_IN_LROUND):
7688 CASE_FLT_FN (BUILT_IN_LLROUND):
7689 real_round (&r, TYPE_MODE (ftype), &x);
7696 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7697 if (double_int_fits_to_tree_p (itype, val))
7698 return double_int_to_tree (itype, val);
7702 switch (DECL_FUNCTION_CODE (fndecl))
7704 CASE_FLT_FN (BUILT_IN_LFLOOR):
7705 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7706 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7707 if (tree_expr_nonnegative_p (arg))
7708 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7709 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7714 return fold_fixed_mathfn (loc, fndecl, arg);
7717 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7718 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7719 the argument to the call. Return NULL_TREE if no simplification can
7723 fold_builtin_bitop (tree fndecl, tree arg)
7725 if (!validate_arg (arg, INTEGER_TYPE))
7728 /* Optimize for constant argument. */
7729 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7731 HOST_WIDE_INT hi, width, result;
7732 unsigned HOST_WIDE_INT lo;
7735 type = TREE_TYPE (arg);
7736 width = TYPE_PRECISION (type);
7737 lo = TREE_INT_CST_LOW (arg);
7739 /* Clear all the bits that are beyond the type's precision. */
7740 if (width > HOST_BITS_PER_WIDE_INT)
7742 hi = TREE_INT_CST_HIGH (arg);
7743 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7744 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7749 if (width < HOST_BITS_PER_WIDE_INT)
7750 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7753 switch (DECL_FUNCTION_CODE (fndecl))
7755 CASE_INT_FN (BUILT_IN_FFS):
7757 result = ffs_hwi (lo);
7759 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
7764 CASE_INT_FN (BUILT_IN_CLZ):
7766 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7768 result = width - floor_log2 (lo) - 1;
7769 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7773 CASE_INT_FN (BUILT_IN_CTZ):
7775 result = ctz_hwi (lo);
7777 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
7778 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7782 CASE_INT_FN (BUILT_IN_POPCOUNT):
7785 result++, lo &= lo - 1;
7787 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7790 CASE_INT_FN (BUILT_IN_PARITY):
7793 result++, lo &= lo - 1;
7795 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7803 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7809 /* Fold function call to builtin_bswap and the long and long long
7810 variants. Return NULL_TREE if no simplification can be made. */
7812 fold_builtin_bswap (tree fndecl, tree arg)
7814 if (! validate_arg (arg, INTEGER_TYPE))
7817 /* Optimize constant value. */
7818 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7820 HOST_WIDE_INT hi, width, r_hi = 0;
7821 unsigned HOST_WIDE_INT lo, r_lo = 0;
7824 type = TREE_TYPE (arg);
7825 width = TYPE_PRECISION (type);
7826 lo = TREE_INT_CST_LOW (arg);
7827 hi = TREE_INT_CST_HIGH (arg);
7829 switch (DECL_FUNCTION_CODE (fndecl))
7831 case BUILT_IN_BSWAP32:
7832 case BUILT_IN_BSWAP64:
7836 for (s = 0; s < width; s += 8)
7838 int d = width - s - 8;
7839 unsigned HOST_WIDE_INT byte;
7841 if (s < HOST_BITS_PER_WIDE_INT)
7842 byte = (lo >> s) & 0xff;
7844 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7846 if (d < HOST_BITS_PER_WIDE_INT)
7849 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7859 if (width < HOST_BITS_PER_WIDE_INT)
7860 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7862 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7868 /* A subroutine of fold_builtin to fold the various logarithmic
7869 functions. Return NULL_TREE if no simplification can me made.
7870 FUNC is the corresponding MPFR logarithm function. */
7873 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7874 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7876 if (validate_arg (arg, REAL_TYPE))
7878 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7880 const enum built_in_function fcode = builtin_mathfn_code (arg);
7882 /* Calculate the result when the argument is a constant. */
7883 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7886 /* Special case, optimize logN(expN(x)) = x. */
7887 if (flag_unsafe_math_optimizations
7888 && ((func == mpfr_log
7889 && (fcode == BUILT_IN_EXP
7890 || fcode == BUILT_IN_EXPF
7891 || fcode == BUILT_IN_EXPL))
7892 || (func == mpfr_log2
7893 && (fcode == BUILT_IN_EXP2
7894 || fcode == BUILT_IN_EXP2F
7895 || fcode == BUILT_IN_EXP2L))
7896 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7897 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7899 /* Optimize logN(func()) for various exponential functions. We
7900 want to determine the value "x" and the power "exponent" in
7901 order to transform logN(x**exponent) into exponent*logN(x). */
7902 if (flag_unsafe_math_optimizations)
7904 tree exponent = 0, x = 0;
7908 CASE_FLT_FN (BUILT_IN_EXP):
7909 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7910 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7912 exponent = CALL_EXPR_ARG (arg, 0);
7914 CASE_FLT_FN (BUILT_IN_EXP2):
7915 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7916 x = build_real (type, dconst2);
7917 exponent = CALL_EXPR_ARG (arg, 0);
7919 CASE_FLT_FN (BUILT_IN_EXP10):
7920 CASE_FLT_FN (BUILT_IN_POW10):
7921 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7923 REAL_VALUE_TYPE dconst10;
7924 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7925 x = build_real (type, dconst10);
7927 exponent = CALL_EXPR_ARG (arg, 0);
7929 CASE_FLT_FN (BUILT_IN_SQRT):
7930 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7931 x = CALL_EXPR_ARG (arg, 0);
7932 exponent = build_real (type, dconsthalf);
7934 CASE_FLT_FN (BUILT_IN_CBRT):
7935 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7936 x = CALL_EXPR_ARG (arg, 0);
7937 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7940 CASE_FLT_FN (BUILT_IN_POW):
7941 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7942 x = CALL_EXPR_ARG (arg, 0);
7943 exponent = CALL_EXPR_ARG (arg, 1);
7949 /* Now perform the optimization. */
7952 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7953 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7961 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7962 NULL_TREE if no simplification can be made. */
7965 fold_builtin_hypot (location_t loc, tree fndecl,
7966 tree arg0, tree arg1, tree type)
7968 tree res, narg0, narg1;
7970 if (!validate_arg (arg0, REAL_TYPE)
7971 || !validate_arg (arg1, REAL_TYPE))
7974 /* Calculate the result when the argument is a constant. */
7975 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7978 /* If either argument to hypot has a negate or abs, strip that off.
7979 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7980 narg0 = fold_strip_sign_ops (arg0);
7981 narg1 = fold_strip_sign_ops (arg1);
7984 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7985 narg1 ? narg1 : arg1);
7988 /* If either argument is zero, hypot is fabs of the other. */
7989 if (real_zerop (arg0))
7990 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7991 else if (real_zerop (arg1))
7992 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7994 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7995 if (flag_unsafe_math_optimizations
7996 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7998 const REAL_VALUE_TYPE sqrt2_trunc
7999 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8000 return fold_build2_loc (loc, MULT_EXPR, type,
8001 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8002 build_real (type, sqrt2_trunc));
8009 /* Fold a builtin function call to pow, powf, or powl. Return
8010 NULL_TREE if no simplification can be made. */
8012 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8016 if (!validate_arg (arg0, REAL_TYPE)
8017 || !validate_arg (arg1, REAL_TYPE))
8020 /* Calculate the result when the argument is a constant. */
8021 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8024 /* Optimize pow(1.0,y) = 1.0. */
8025 if (real_onep (arg0))
8026 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8028 if (TREE_CODE (arg1) == REAL_CST
8029 && !TREE_OVERFLOW (arg1))
8031 REAL_VALUE_TYPE cint;
8035 c = TREE_REAL_CST (arg1);
8037 /* Optimize pow(x,0.0) = 1.0. */
8038 if (REAL_VALUES_EQUAL (c, dconst0))
8039 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8042 /* Optimize pow(x,1.0) = x. */
8043 if (REAL_VALUES_EQUAL (c, dconst1))
8046 /* Optimize pow(x,-1.0) = 1.0/x. */
8047 if (REAL_VALUES_EQUAL (c, dconstm1))
8048 return fold_build2_loc (loc, RDIV_EXPR, type,
8049 build_real (type, dconst1), arg0);
8051 /* Optimize pow(x,0.5) = sqrt(x). */
8052 if (flag_unsafe_math_optimizations
8053 && REAL_VALUES_EQUAL (c, dconsthalf))
8055 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8057 if (sqrtfn != NULL_TREE)
8058 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8061 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8062 if (flag_unsafe_math_optimizations)
8064 const REAL_VALUE_TYPE dconstroot
8065 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8067 if (REAL_VALUES_EQUAL (c, dconstroot))
8069 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8070 if (cbrtfn != NULL_TREE)
8071 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8075 /* Check for an integer exponent. */
8076 n = real_to_integer (&c);
8077 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8078 if (real_identical (&c, &cint))
8080 /* Attempt to evaluate pow at compile-time, unless this should
8081 raise an exception. */
8082 if (TREE_CODE (arg0) == REAL_CST
8083 && !TREE_OVERFLOW (arg0)
8085 || (!flag_trapping_math && !flag_errno_math)
8086 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8091 x = TREE_REAL_CST (arg0);
8092 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8093 if (flag_unsafe_math_optimizations || !inexact)
8094 return build_real (type, x);
8097 /* Strip sign ops from even integer powers. */
8098 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8100 tree narg0 = fold_strip_sign_ops (arg0);
8102 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8107 if (flag_unsafe_math_optimizations)
8109 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8111 /* Optimize pow(expN(x),y) = expN(x*y). */
8112 if (BUILTIN_EXPONENT_P (fcode))
8114 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8115 tree arg = CALL_EXPR_ARG (arg0, 0);
8116 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8117 return build_call_expr_loc (loc, expfn, 1, arg);
8120 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8121 if (BUILTIN_SQRT_P (fcode))
8123 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8124 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8125 build_real (type, dconsthalf));
8126 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8129 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8130 if (BUILTIN_CBRT_P (fcode))
8132 tree arg = CALL_EXPR_ARG (arg0, 0);
8133 if (tree_expr_nonnegative_p (arg))
8135 const REAL_VALUE_TYPE dconstroot
8136 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8137 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8138 build_real (type, dconstroot));
8139 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8143 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8144 if (fcode == BUILT_IN_POW
8145 || fcode == BUILT_IN_POWF
8146 || fcode == BUILT_IN_POWL)
8148 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8149 if (tree_expr_nonnegative_p (arg00))
8151 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8152 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8153 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8161 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8162 Return NULL_TREE if no simplification can be made. */
8164 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8165 tree arg0, tree arg1, tree type)
8167 if (!validate_arg (arg0, REAL_TYPE)
8168 || !validate_arg (arg1, INTEGER_TYPE))
8171 /* Optimize pow(1.0,y) = 1.0. */
8172 if (real_onep (arg0))
8173 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8175 if (host_integerp (arg1, 0))
8177 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8179 /* Evaluate powi at compile-time. */
8180 if (TREE_CODE (arg0) == REAL_CST
8181 && !TREE_OVERFLOW (arg0))
8184 x = TREE_REAL_CST (arg0);
8185 real_powi (&x, TYPE_MODE (type), &x, c);
8186 return build_real (type, x);
8189 /* Optimize pow(x,0) = 1.0. */
8191 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8194 /* Optimize pow(x,1) = x. */
8198 /* Optimize pow(x,-1) = 1.0/x. */
8200 return fold_build2_loc (loc, RDIV_EXPR, type,
8201 build_real (type, dconst1), arg0);
8207 /* A subroutine of fold_builtin to fold the various exponent
8208 functions. Return NULL_TREE if no simplification can be made.
8209 FUNC is the corresponding MPFR exponent function. */
8212 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8213 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8215 if (validate_arg (arg, REAL_TYPE))
8217 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8220 /* Calculate the result when the argument is a constant. */
8221 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8224 /* Optimize expN(logN(x)) = x. */
8225 if (flag_unsafe_math_optimizations)
8227 const enum built_in_function fcode = builtin_mathfn_code (arg);
8229 if ((func == mpfr_exp
8230 && (fcode == BUILT_IN_LOG
8231 || fcode == BUILT_IN_LOGF
8232 || fcode == BUILT_IN_LOGL))
8233 || (func == mpfr_exp2
8234 && (fcode == BUILT_IN_LOG2
8235 || fcode == BUILT_IN_LOG2F
8236 || fcode == BUILT_IN_LOG2L))
8237 || (func == mpfr_exp10
8238 && (fcode == BUILT_IN_LOG10
8239 || fcode == BUILT_IN_LOG10F
8240 || fcode == BUILT_IN_LOG10L)))
8241 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8248 /* Return true if VAR is a VAR_DECL or a component thereof. */
8251 var_decl_component_p (tree var)
8254 while (handled_component_p (inner))
8255 inner = TREE_OPERAND (inner, 0);
8256 return SSA_VAR_P (inner);
8259 /* Fold function call to builtin memset. Return
8260 NULL_TREE if no simplification can be made. */
8263 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8264 tree type, bool ignore)
8266 tree var, ret, etype;
8267 unsigned HOST_WIDE_INT length, cval;
8269 if (! validate_arg (dest, POINTER_TYPE)
8270 || ! validate_arg (c, INTEGER_TYPE)
8271 || ! validate_arg (len, INTEGER_TYPE))
8274 if (! host_integerp (len, 1))
8277 /* If the LEN parameter is zero, return DEST. */
8278 if (integer_zerop (len))
8279 return omit_one_operand_loc (loc, type, dest, c);
8281 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8286 if (TREE_CODE (var) != ADDR_EXPR)
8289 var = TREE_OPERAND (var, 0);
8290 if (TREE_THIS_VOLATILE (var))
8293 etype = TREE_TYPE (var);
8294 if (TREE_CODE (etype) == ARRAY_TYPE)
8295 etype = TREE_TYPE (etype);
8297 if (!INTEGRAL_TYPE_P (etype)
8298 && !POINTER_TYPE_P (etype))
8301 if (! var_decl_component_p (var))
8304 length = tree_low_cst (len, 1);
8305 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8306 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8310 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8313 if (integer_zerop (c))
8317 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8320 cval = tree_low_cst (c, 1);
8324 cval |= (cval << 31) << 1;
8327 ret = build_int_cst_type (etype, cval);
8328 var = build_fold_indirect_ref_loc (loc,
8329 fold_convert_loc (loc,
8330 build_pointer_type (etype),
8332 ret = build2 (MODIFY_EXPR, etype, var, ret);
8336 return omit_one_operand_loc (loc, type, dest, ret);
8339 /* Fold function call to builtin memset. Return
8340 NULL_TREE if no simplification can be made. */
8343 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8345 if (! validate_arg (dest, POINTER_TYPE)
8346 || ! validate_arg (size, INTEGER_TYPE))
8352 /* New argument list transforming bzero(ptr x, int y) to
8353 memset(ptr x, int 0, size_t y). This is done this way
8354 so that if it isn't expanded inline, we fallback to
8355 calling bzero instead of memset. */
8357 return fold_builtin_memset (loc, dest, integer_zero_node,
8358 fold_convert_loc (loc, sizetype, size),
8359 void_type_node, ignore);
8362 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8363 NULL_TREE if no simplification can be made.
8364 If ENDP is 0, return DEST (like memcpy).
8365 If ENDP is 1, return DEST+LEN (like mempcpy).
8366 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8367 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8371 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8372 tree len, tree type, bool ignore, int endp)
8374 tree destvar, srcvar, expr;
8376 if (! validate_arg (dest, POINTER_TYPE)
8377 || ! validate_arg (src, POINTER_TYPE)
8378 || ! validate_arg (len, INTEGER_TYPE))
8381 /* If the LEN parameter is zero, return DEST. */
8382 if (integer_zerop (len))
8383 return omit_one_operand_loc (loc, type, dest, src);
8385 /* If SRC and DEST are the same (and not volatile), return
8386 DEST{,+LEN,+LEN-1}. */
8387 if (operand_equal_p (src, dest, 0))
8391 tree srctype, desttype;
8392 unsigned int src_align, dest_align;
8397 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8398 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8400 /* Both DEST and SRC must be pointer types.
8401 ??? This is what old code did. Is the testing for pointer types
8404 If either SRC is readonly or length is 1, we can use memcpy. */
8405 if (!dest_align || !src_align)
8407 if (readonly_data_expr (src)
8408 || (host_integerp (len, 1)
8409 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8410 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8412 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8415 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8418 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8419 if (TREE_CODE (src) == ADDR_EXPR
8420 && TREE_CODE (dest) == ADDR_EXPR)
8422 tree src_base, dest_base, fn;
8423 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8424 HOST_WIDE_INT size = -1;
8425 HOST_WIDE_INT maxsize = -1;
8427 srcvar = TREE_OPERAND (src, 0);
8428 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8430 destvar = TREE_OPERAND (dest, 0);
8431 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8433 if (host_integerp (len, 1))
8434 maxsize = tree_low_cst (len, 1);
8437 src_offset /= BITS_PER_UNIT;
8438 dest_offset /= BITS_PER_UNIT;
8439 if (SSA_VAR_P (src_base)
8440 && SSA_VAR_P (dest_base))
8442 if (operand_equal_p (src_base, dest_base, 0)
8443 && ranges_overlap_p (src_offset, maxsize,
8444 dest_offset, maxsize))
8447 else if (TREE_CODE (src_base) == MEM_REF
8448 && TREE_CODE (dest_base) == MEM_REF)
8451 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8452 TREE_OPERAND (dest_base, 0), 0))
8454 off = double_int_add (mem_ref_offset (src_base),
8455 shwi_to_double_int (src_offset));
8456 if (!double_int_fits_in_shwi_p (off))
8458 src_offset = off.low;
8459 off = double_int_add (mem_ref_offset (dest_base),
8460 shwi_to_double_int (dest_offset));
8461 if (!double_int_fits_in_shwi_p (off))
8463 dest_offset = off.low;
8464 if (ranges_overlap_p (src_offset, maxsize,
8465 dest_offset, maxsize))
8471 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8474 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8477 /* If the destination and source do not alias optimize into
8479 if ((is_gimple_min_invariant (dest)
8480 || TREE_CODE (dest) == SSA_NAME)
8481 && (is_gimple_min_invariant (src)
8482 || TREE_CODE (src) == SSA_NAME))
8485 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8486 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8487 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8490 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8493 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8500 if (!host_integerp (len, 0))
8503 This logic lose for arguments like (type *)malloc (sizeof (type)),
8504 since we strip the casts of up to VOID return value from malloc.
8505 Perhaps we ought to inherit type from non-VOID argument here? */
8508 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8509 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8511 tree tem = TREE_OPERAND (src, 0);
8513 if (tem != TREE_OPERAND (src, 0))
8514 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8516 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8518 tree tem = TREE_OPERAND (dest, 0);
8520 if (tem != TREE_OPERAND (dest, 0))
8521 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8523 srctype = TREE_TYPE (TREE_TYPE (src));
8525 && TREE_CODE (srctype) == ARRAY_TYPE
8526 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8528 srctype = TREE_TYPE (srctype);
8530 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8532 desttype = TREE_TYPE (TREE_TYPE (dest));
8534 && TREE_CODE (desttype) == ARRAY_TYPE
8535 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8537 desttype = TREE_TYPE (desttype);
8539 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8541 if (!srctype || !desttype
8542 || TREE_ADDRESSABLE (srctype)
8543 || TREE_ADDRESSABLE (desttype)
8544 || !TYPE_SIZE_UNIT (srctype)
8545 || !TYPE_SIZE_UNIT (desttype)
8546 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8547 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST)
8550 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8551 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8552 if (dest_align < TYPE_ALIGN (desttype)
8553 || src_align < TYPE_ALIGN (srctype))
8557 dest = builtin_save_expr (dest);
8559 /* Build accesses at offset zero with a ref-all character type. */
8560 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8561 ptr_mode, true), 0);
8564 STRIP_NOPS (destvar);
8565 if (TREE_CODE (destvar) == ADDR_EXPR
8566 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8567 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8568 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8570 destvar = NULL_TREE;
8573 STRIP_NOPS (srcvar);
8574 if (TREE_CODE (srcvar) == ADDR_EXPR
8575 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8576 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8577 && (!STRICT_ALIGNMENT
8579 || src_align >= TYPE_ALIGN (desttype)))
8580 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8585 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8588 if (srcvar == NULL_TREE)
8590 if (STRICT_ALIGNMENT
8591 && src_align < TYPE_ALIGN (desttype))
8594 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8596 else if (destvar == NULL_TREE)
8598 if (STRICT_ALIGNMENT
8599 && dest_align < TYPE_ALIGN (srctype))
8602 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8605 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8611 if (endp == 0 || endp == 3)
8612 return omit_one_operand_loc (loc, type, dest, expr);
8618 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8621 len = fold_convert_loc (loc, sizetype, len);
8622 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8623 dest = fold_convert_loc (loc, type, dest);
8625 dest = omit_one_operand_loc (loc, type, dest, expr);
8629 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8630 If LEN is not NULL, it represents the length of the string to be
8631 copied. Return NULL_TREE if no simplification can be made. */
8634 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8638 if (!validate_arg (dest, POINTER_TYPE)
8639 || !validate_arg (src, POINTER_TYPE))
8642 /* If SRC and DEST are the same (and not volatile), return DEST. */
8643 if (operand_equal_p (src, dest, 0))
8644 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8646 if (optimize_function_for_size_p (cfun))
8649 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8655 len = c_strlen (src, 1);
8656 if (! len || TREE_SIDE_EFFECTS (len))
8660 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8661 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8662 build_call_expr_loc (loc, fn, 3, dest, src, len));
8665 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8666 Return NULL_TREE if no simplification can be made. */
8669 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8671 tree fn, len, lenp1, call, type;
8673 if (!validate_arg (dest, POINTER_TYPE)
8674 || !validate_arg (src, POINTER_TYPE))
8677 len = c_strlen (src, 1);
8679 || TREE_CODE (len) != INTEGER_CST)
8682 if (optimize_function_for_size_p (cfun)
8683 /* If length is zero it's small enough. */
8684 && !integer_zerop (len))
8687 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8691 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8692 /* We use dest twice in building our expression. Save it from
8693 multiple expansions. */
8694 dest = builtin_save_expr (dest);
8695 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8697 type = TREE_TYPE (TREE_TYPE (fndecl));
8698 len = fold_convert_loc (loc, sizetype, len);
8699 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8700 dest = fold_convert_loc (loc, type, dest);
8701 dest = omit_one_operand_loc (loc, type, dest, call);
8705 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8706 If SLEN is not NULL, it represents the length of the source string.
8707 Return NULL_TREE if no simplification can be made. */
8710 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8711 tree src, tree len, tree slen)
8715 if (!validate_arg (dest, POINTER_TYPE)
8716 || !validate_arg (src, POINTER_TYPE)
8717 || !validate_arg (len, INTEGER_TYPE))
8720 /* If the LEN parameter is zero, return DEST. */
8721 if (integer_zerop (len))
8722 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8724 /* We can't compare slen with len as constants below if len is not a
8726 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8730 slen = c_strlen (src, 1);
8732 /* Now, we must be passed a constant src ptr parameter. */
8733 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8736 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8738 /* We do not support simplification of this case, though we do
8739 support it when expanding trees into RTL. */
8740 /* FIXME: generate a call to __builtin_memset. */
8741 if (tree_int_cst_lt (slen, len))
8744 /* OK transform into builtin memcpy. */
8745 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8748 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8749 build_call_expr_loc (loc, fn, 3, dest, src, len));
8752 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8753 arguments to the call, and TYPE is its return type.
8754 Return NULL_TREE if no simplification can be made. */
8757 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8759 if (!validate_arg (arg1, POINTER_TYPE)
8760 || !validate_arg (arg2, INTEGER_TYPE)
8761 || !validate_arg (len, INTEGER_TYPE))
8767 if (TREE_CODE (arg2) != INTEGER_CST
8768 || !host_integerp (len, 1))
8771 p1 = c_getstr (arg1);
8772 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8778 if (target_char_cast (arg2, &c))
8781 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8784 return build_int_cst (TREE_TYPE (arg1), 0);
8786 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8788 return fold_convert_loc (loc, type, tem);
8794 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8795 Return NULL_TREE if no simplification can be made. */
8798 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8800 const char *p1, *p2;
8802 if (!validate_arg (arg1, POINTER_TYPE)
8803 || !validate_arg (arg2, POINTER_TYPE)
8804 || !validate_arg (len, INTEGER_TYPE))
8807 /* If the LEN parameter is zero, return zero. */
8808 if (integer_zerop (len))
8809 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8812 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8813 if (operand_equal_p (arg1, arg2, 0))
8814 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8816 p1 = c_getstr (arg1);
8817 p2 = c_getstr (arg2);
8819 /* If all arguments are constant, and the value of len is not greater
8820 than the lengths of arg1 and arg2, evaluate at compile-time. */
8821 if (host_integerp (len, 1) && p1 && p2
8822 && compare_tree_int (len, strlen (p1) + 1) <= 0
8823 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8825 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8828 return integer_one_node;
8830 return integer_minus_one_node;
8832 return integer_zero_node;
8835 /* If len parameter is one, return an expression corresponding to
8836 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8837 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8839 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8840 tree cst_uchar_ptr_node
8841 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8844 = fold_convert_loc (loc, integer_type_node,
8845 build1 (INDIRECT_REF, cst_uchar_node,
8846 fold_convert_loc (loc,
8850 = fold_convert_loc (loc, integer_type_node,
8851 build1 (INDIRECT_REF, cst_uchar_node,
8852 fold_convert_loc (loc,
8855 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8861 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8862 Return NULL_TREE if no simplification can be made. */
8865 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8867 const char *p1, *p2;
8869 if (!validate_arg (arg1, POINTER_TYPE)
8870 || !validate_arg (arg2, POINTER_TYPE))
8873 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8874 if (operand_equal_p (arg1, arg2, 0))
8875 return integer_zero_node;
8877 p1 = c_getstr (arg1);
8878 p2 = c_getstr (arg2);
8882 const int i = strcmp (p1, p2);
8884 return integer_minus_one_node;
8886 return integer_one_node;
8888 return integer_zero_node;
8891 /* If the second arg is "", return *(const unsigned char*)arg1. */
8892 if (p2 && *p2 == '\0')
8894 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8895 tree cst_uchar_ptr_node
8896 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8898 return fold_convert_loc (loc, integer_type_node,
8899 build1 (INDIRECT_REF, cst_uchar_node,
8900 fold_convert_loc (loc,
8905 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8906 if (p1 && *p1 == '\0')
8908 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8909 tree cst_uchar_ptr_node
8910 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8913 = fold_convert_loc (loc, integer_type_node,
8914 build1 (INDIRECT_REF, cst_uchar_node,
8915 fold_convert_loc (loc,
8918 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8924 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8925 Return NULL_TREE if no simplification can be made. */
8928 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8930 const char *p1, *p2;
8932 if (!validate_arg (arg1, POINTER_TYPE)
8933 || !validate_arg (arg2, POINTER_TYPE)
8934 || !validate_arg (len, INTEGER_TYPE))
8937 /* If the LEN parameter is zero, return zero. */
8938 if (integer_zerop (len))
8939 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8942 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8943 if (operand_equal_p (arg1, arg2, 0))
8944 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8946 p1 = c_getstr (arg1);
8947 p2 = c_getstr (arg2);
8949 if (host_integerp (len, 1) && p1 && p2)
8951 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8953 return integer_one_node;
8955 return integer_minus_one_node;
8957 return integer_zero_node;
8960 /* If the second arg is "", and the length is greater than zero,
8961 return *(const unsigned char*)arg1. */
8962 if (p2 && *p2 == '\0'
8963 && TREE_CODE (len) == INTEGER_CST
8964 && tree_int_cst_sgn (len) == 1)
8966 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8967 tree cst_uchar_ptr_node
8968 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8970 return fold_convert_loc (loc, integer_type_node,
8971 build1 (INDIRECT_REF, cst_uchar_node,
8972 fold_convert_loc (loc,
8977 /* If the first arg is "", and the length is greater than zero,
8978 return -*(const unsigned char*)arg2. */
8979 if (p1 && *p1 == '\0'
8980 && TREE_CODE (len) == INTEGER_CST
8981 && tree_int_cst_sgn (len) == 1)
8983 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8984 tree cst_uchar_ptr_node
8985 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8987 tree temp = fold_convert_loc (loc, integer_type_node,
8988 build1 (INDIRECT_REF, cst_uchar_node,
8989 fold_convert_loc (loc,
8992 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8995 /* If len parameter is one, return an expression corresponding to
8996 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8997 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8999 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9000 tree cst_uchar_ptr_node
9001 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9003 tree ind1 = fold_convert_loc (loc, integer_type_node,
9004 build1 (INDIRECT_REF, cst_uchar_node,
9005 fold_convert_loc (loc,
9008 tree ind2 = fold_convert_loc (loc, integer_type_node,
9009 build1 (INDIRECT_REF, cst_uchar_node,
9010 fold_convert_loc (loc,
9013 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9019 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9020 ARG. Return NULL_TREE if no simplification can be made. */
9023 fold_builtin_signbit (location_t loc, tree arg, tree type)
9027 if (!validate_arg (arg, REAL_TYPE))
9030 /* If ARG is a compile-time constant, determine the result. */
9031 if (TREE_CODE (arg) == REAL_CST
9032 && !TREE_OVERFLOW (arg))
9036 c = TREE_REAL_CST (arg);
9037 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9038 return fold_convert_loc (loc, type, temp);
9041 /* If ARG is non-negative, the result is always zero. */
9042 if (tree_expr_nonnegative_p (arg))
9043 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9045 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9046 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9047 return fold_build2_loc (loc, LT_EXPR, type, arg,
9048 build_real (TREE_TYPE (arg), dconst0));
9053 /* Fold function call to builtin copysign, copysignf or copysignl with
9054 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9058 fold_builtin_copysign (location_t loc, tree fndecl,
9059 tree arg1, tree arg2, tree type)
9063 if (!validate_arg (arg1, REAL_TYPE)
9064 || !validate_arg (arg2, REAL_TYPE))
9067 /* copysign(X,X) is X. */
9068 if (operand_equal_p (arg1, arg2, 0))
9069 return fold_convert_loc (loc, type, arg1);
9071 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9072 if (TREE_CODE (arg1) == REAL_CST
9073 && TREE_CODE (arg2) == REAL_CST
9074 && !TREE_OVERFLOW (arg1)
9075 && !TREE_OVERFLOW (arg2))
9077 REAL_VALUE_TYPE c1, c2;
9079 c1 = TREE_REAL_CST (arg1);
9080 c2 = TREE_REAL_CST (arg2);
9081 /* c1.sign := c2.sign. */
9082 real_copysign (&c1, &c2);
9083 return build_real (type, c1);
9086 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9087 Remember to evaluate Y for side-effects. */
9088 if (tree_expr_nonnegative_p (arg2))
9089 return omit_one_operand_loc (loc, type,
9090 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9093 /* Strip sign changing operations for the first argument. */
9094 tem = fold_strip_sign_ops (arg1);
9096 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9101 /* Fold a call to builtin isascii with argument ARG. */
9104 fold_builtin_isascii (location_t loc, tree arg)
9106 if (!validate_arg (arg, INTEGER_TYPE))
9110 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9111 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9112 build_int_cst (NULL_TREE,
9113 ~ (unsigned HOST_WIDE_INT) 0x7f));
9114 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9115 arg, integer_zero_node);
9119 /* Fold a call to builtin toascii with argument ARG. */
9122 fold_builtin_toascii (location_t loc, tree arg)
9124 if (!validate_arg (arg, INTEGER_TYPE))
9127 /* Transform toascii(c) -> (c & 0x7f). */
9128 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9129 build_int_cst (NULL_TREE, 0x7f));
9132 /* Fold a call to builtin isdigit with argument ARG. */
9135 fold_builtin_isdigit (location_t loc, tree arg)
9137 if (!validate_arg (arg, INTEGER_TYPE))
9141 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9142 /* According to the C standard, isdigit is unaffected by locale.
9143 However, it definitely is affected by the target character set. */
9144 unsigned HOST_WIDE_INT target_digit0
9145 = lang_hooks.to_target_charset ('0');
9147 if (target_digit0 == 0)
9150 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9151 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9152 build_int_cst (unsigned_type_node, target_digit0));
9153 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9154 build_int_cst (unsigned_type_node, 9));
9158 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9161 fold_builtin_fabs (location_t loc, tree arg, tree type)
9163 if (!validate_arg (arg, REAL_TYPE))
9166 arg = fold_convert_loc (loc, type, arg);
9167 if (TREE_CODE (arg) == REAL_CST)
9168 return fold_abs_const (arg, type);
9169 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9172 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9175 fold_builtin_abs (location_t loc, tree arg, tree type)
9177 if (!validate_arg (arg, INTEGER_TYPE))
9180 arg = fold_convert_loc (loc, type, arg);
9181 if (TREE_CODE (arg) == INTEGER_CST)
9182 return fold_abs_const (arg, type);
9183 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9186 /* Fold a call to builtin fmin or fmax. */
9189 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9190 tree type, bool max)
9192 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9194 /* Calculate the result when the argument is a constant. */
9195 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9200 /* If either argument is NaN, return the other one. Avoid the
9201 transformation if we get (and honor) a signalling NaN. Using
9202 omit_one_operand() ensures we create a non-lvalue. */
9203 if (TREE_CODE (arg0) == REAL_CST
9204 && real_isnan (&TREE_REAL_CST (arg0))
9205 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9206 || ! TREE_REAL_CST (arg0).signalling))
9207 return omit_one_operand_loc (loc, type, arg1, arg0);
9208 if (TREE_CODE (arg1) == REAL_CST
9209 && real_isnan (&TREE_REAL_CST (arg1))
9210 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9211 || ! TREE_REAL_CST (arg1).signalling))
9212 return omit_one_operand_loc (loc, type, arg0, arg1);
9214 /* Transform fmin/fmax(x,x) -> x. */
9215 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9216 return omit_one_operand_loc (loc, type, arg0, arg1);
9218 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9219 functions to return the numeric arg if the other one is NaN.
9220 These tree codes don't honor that, so only transform if
9221 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9222 handled, so we don't have to worry about it either. */
9223 if (flag_finite_math_only)
9224 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9225 fold_convert_loc (loc, type, arg0),
9226 fold_convert_loc (loc, type, arg1));
9231 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9234 fold_builtin_carg (location_t loc, tree arg, tree type)
9236 if (validate_arg (arg, COMPLEX_TYPE)
9237 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9239 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9243 tree new_arg = builtin_save_expr (arg);
9244 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9245 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9246 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9253 /* Fold a call to builtin logb/ilogb. */
9256 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9258 if (! validate_arg (arg, REAL_TYPE))
9263 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9265 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9271 /* If arg is Inf or NaN and we're logb, return it. */
9272 if (TREE_CODE (rettype) == REAL_TYPE)
9273 return fold_convert_loc (loc, rettype, arg);
9274 /* Fall through... */
9276 /* Zero may set errno and/or raise an exception for logb, also
9277 for ilogb we don't know FP_ILOGB0. */
9280 /* For normal numbers, proceed iff radix == 2. In GCC,
9281 normalized significands are in the range [0.5, 1.0). We
9282 want the exponent as if they were [1.0, 2.0) so get the
9283 exponent and subtract 1. */
9284 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9285 return fold_convert_loc (loc, rettype,
9286 build_int_cst (NULL_TREE,
9287 REAL_EXP (value)-1));
9295 /* Fold a call to builtin significand, if radix == 2. */
9298 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9300 if (! validate_arg (arg, REAL_TYPE))
9305 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9307 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9314 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9315 return fold_convert_loc (loc, rettype, arg);
9317 /* For normal numbers, proceed iff radix == 2. */
9318 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9320 REAL_VALUE_TYPE result = *value;
9321 /* In GCC, normalized significands are in the range [0.5,
9322 1.0). We want them to be [1.0, 2.0) so set the
9324 SET_REAL_EXP (&result, 1);
9325 return build_real (rettype, result);
9334 /* Fold a call to builtin frexp, we can assume the base is 2. */
9337 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9339 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9344 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9347 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9349 /* Proceed if a valid pointer type was passed in. */
9350 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9352 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9358 /* For +-0, return (*exp = 0, +-0). */
9359 exp = integer_zero_node;
9364 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9365 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9368 /* Since the frexp function always expects base 2, and in
9369 GCC normalized significands are already in the range
9370 [0.5, 1.0), we have exactly what frexp wants. */
9371 REAL_VALUE_TYPE frac_rvt = *value;
9372 SET_REAL_EXP (&frac_rvt, 0);
9373 frac = build_real (rettype, frac_rvt);
9374 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9381 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9382 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9383 TREE_SIDE_EFFECTS (arg1) = 1;
9384 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9390 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9391 then we can assume the base is two. If it's false, then we have to
9392 check the mode of the TYPE parameter in certain cases. */
9395 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9396 tree type, bool ldexp)
9398 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9403 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9404 if (real_zerop (arg0) || integer_zerop (arg1)
9405 || (TREE_CODE (arg0) == REAL_CST
9406 && !real_isfinite (&TREE_REAL_CST (arg0))))
9407 return omit_one_operand_loc (loc, type, arg0, arg1);
9409 /* If both arguments are constant, then try to evaluate it. */
9410 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9411 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9412 && host_integerp (arg1, 0))
9414 /* Bound the maximum adjustment to twice the range of the
9415 mode's valid exponents. Use abs to ensure the range is
9416 positive as a sanity check. */
9417 const long max_exp_adj = 2 *
9418 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9419 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9421 /* Get the user-requested adjustment. */
9422 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9424 /* The requested adjustment must be inside this range. This
9425 is a preliminary cap to avoid things like overflow, we
9426 may still fail to compute the result for other reasons. */
9427 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9429 REAL_VALUE_TYPE initial_result;
9431 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9433 /* Ensure we didn't overflow. */
9434 if (! real_isinf (&initial_result))
9436 const REAL_VALUE_TYPE trunc_result
9437 = real_value_truncate (TYPE_MODE (type), initial_result);
9439 /* Only proceed if the target mode can hold the
9441 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9442 return build_real (type, trunc_result);
9451 /* Fold a call to builtin modf. */
9454 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9456 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9461 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9464 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9466 /* Proceed if a valid pointer type was passed in. */
9467 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9469 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9470 REAL_VALUE_TYPE trunc, frac;
9476 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9477 trunc = frac = *value;
9480 /* For +-Inf, return (*arg1 = arg0, +-0). */
9482 frac.sign = value->sign;
9486 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9487 real_trunc (&trunc, VOIDmode, value);
9488 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9489 /* If the original number was negative and already
9490 integral, then the fractional part is -0.0. */
9491 if (value->sign && frac.cl == rvc_zero)
9492 frac.sign = value->sign;
9496 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9497 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9498 build_real (rettype, trunc));
9499 TREE_SIDE_EFFECTS (arg1) = 1;
9500 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9501 build_real (rettype, frac));
9507 /* Given a location LOC, an interclass builtin function decl FNDECL
9508 and its single argument ARG, return an folded expression computing
9509 the same, or NULL_TREE if we either couldn't or didn't want to fold
9510 (the latter happen if there's an RTL instruction available). */
9513 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9515 enum machine_mode mode;
9517 if (!validate_arg (arg, REAL_TYPE))
9520 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9523 mode = TYPE_MODE (TREE_TYPE (arg));
9525 /* If there is no optab, try generic code. */
9526 switch (DECL_FUNCTION_CODE (fndecl))
9530 CASE_FLT_FN (BUILT_IN_ISINF):
9532 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9533 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9534 tree const type = TREE_TYPE (arg);
9538 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9539 real_from_string (&r, buf);
9540 result = build_call_expr (isgr_fn, 2,
9541 fold_build1_loc (loc, ABS_EXPR, type, arg),
9542 build_real (type, r));
9545 CASE_FLT_FN (BUILT_IN_FINITE):
9546 case BUILT_IN_ISFINITE:
9548 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9549 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9550 tree const type = TREE_TYPE (arg);
9554 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9555 real_from_string (&r, buf);
9556 result = build_call_expr (isle_fn, 2,
9557 fold_build1_loc (loc, ABS_EXPR, type, arg),
9558 build_real (type, r));
9559 /*result = fold_build2_loc (loc, UNGT_EXPR,
9560 TREE_TYPE (TREE_TYPE (fndecl)),
9561 fold_build1_loc (loc, ABS_EXPR, type, arg),
9562 build_real (type, r));
9563 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9564 TREE_TYPE (TREE_TYPE (fndecl)),
9568 case BUILT_IN_ISNORMAL:
9570 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9571 islessequal(fabs(x),DBL_MAX). */
9572 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9573 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9574 tree const type = TREE_TYPE (arg);
9575 REAL_VALUE_TYPE rmax, rmin;
9578 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9579 real_from_string (&rmax, buf);
9580 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9581 real_from_string (&rmin, buf);
9582 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9583 result = build_call_expr (isle_fn, 2, arg,
9584 build_real (type, rmax));
9585 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9586 build_call_expr (isge_fn, 2, arg,
9587 build_real (type, rmin)));
9597 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9598 ARG is the argument for the call. */
9601 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9603 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9606 if (!validate_arg (arg, REAL_TYPE))
9609 switch (builtin_index)
9611 case BUILT_IN_ISINF:
9612 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9613 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9615 if (TREE_CODE (arg) == REAL_CST)
9617 r = TREE_REAL_CST (arg);
9618 if (real_isinf (&r))
9619 return real_compare (GT_EXPR, &r, &dconst0)
9620 ? integer_one_node : integer_minus_one_node;
9622 return integer_zero_node;
9627 case BUILT_IN_ISINF_SIGN:
9629 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9630 /* In a boolean context, GCC will fold the inner COND_EXPR to
9631 1. So e.g. "if (isinf_sign(x))" would be folded to just
9632 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9633 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9634 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9635 tree tmp = NULL_TREE;
9637 arg = builtin_save_expr (arg);
9639 if (signbit_fn && isinf_fn)
9641 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9642 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9644 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9645 signbit_call, integer_zero_node);
9646 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9647 isinf_call, integer_zero_node);
9649 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9650 integer_minus_one_node, integer_one_node);
9651 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9659 case BUILT_IN_ISFINITE:
9660 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9661 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9662 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9664 if (TREE_CODE (arg) == REAL_CST)
9666 r = TREE_REAL_CST (arg);
9667 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9672 case BUILT_IN_ISNAN:
9673 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9674 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9676 if (TREE_CODE (arg) == REAL_CST)
9678 r = TREE_REAL_CST (arg);
9679 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9682 arg = builtin_save_expr (arg);
9683 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9690 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9691 This builtin will generate code to return the appropriate floating
9692 point classification depending on the value of the floating point
9693 number passed in. The possible return values must be supplied as
9694 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9695 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9696 one floating point argument which is "type generic". */
9699 fold_builtin_fpclassify (location_t loc, tree exp)
9701 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9702 arg, type, res, tmp;
9703 enum machine_mode mode;
9707 /* Verify the required arguments in the original call. */
9708 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9709 INTEGER_TYPE, INTEGER_TYPE,
9710 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9713 fp_nan = CALL_EXPR_ARG (exp, 0);
9714 fp_infinite = CALL_EXPR_ARG (exp, 1);
9715 fp_normal = CALL_EXPR_ARG (exp, 2);
9716 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9717 fp_zero = CALL_EXPR_ARG (exp, 4);
9718 arg = CALL_EXPR_ARG (exp, 5);
9719 type = TREE_TYPE (arg);
9720 mode = TYPE_MODE (type);
9721 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9725 (fabs(x) == Inf ? FP_INFINITE :
9726 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9727 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9729 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9730 build_real (type, dconst0));
9731 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9732 tmp, fp_zero, fp_subnormal);
9734 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9735 real_from_string (&r, buf);
9736 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9737 arg, build_real (type, r));
9738 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9740 if (HONOR_INFINITIES (mode))
9743 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9744 build_real (type, r));
9745 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9749 if (HONOR_NANS (mode))
9751 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9752 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9758 /* Fold a call to an unordered comparison function such as
9759 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9760 being called and ARG0 and ARG1 are the arguments for the call.
9761 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9762 the opposite of the desired result. UNORDERED_CODE is used
9763 for modes that can hold NaNs and ORDERED_CODE is used for
9767 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9768 enum tree_code unordered_code,
9769 enum tree_code ordered_code)
9771 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9772 enum tree_code code;
9774 enum tree_code code0, code1;
9775 tree cmp_type = NULL_TREE;
9777 type0 = TREE_TYPE (arg0);
9778 type1 = TREE_TYPE (arg1);
9780 code0 = TREE_CODE (type0);
9781 code1 = TREE_CODE (type1);
9783 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9784 /* Choose the wider of two real types. */
9785 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9787 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9789 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9792 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9793 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9795 if (unordered_code == UNORDERED_EXPR)
9797 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9798 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9799 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9802 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9804 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9805 fold_build2_loc (loc, code, type, arg0, arg1));
9808 /* Fold a call to built-in function FNDECL with 0 arguments.
9809 IGNORE is true if the result of the function call is ignored. This
9810 function returns NULL_TREE if no simplification was possible. */
9813 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9815 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9816 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9819 CASE_FLT_FN (BUILT_IN_INF):
9820 case BUILT_IN_INFD32:
9821 case BUILT_IN_INFD64:
9822 case BUILT_IN_INFD128:
9823 return fold_builtin_inf (loc, type, true);
9825 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9826 return fold_builtin_inf (loc, type, false);
9828 case BUILT_IN_CLASSIFY_TYPE:
9829 return fold_builtin_classify_type (NULL_TREE);
9837 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9838 IGNORE is true if the result of the function call is ignored. This
9839 function returns NULL_TREE if no simplification was possible. */
9842 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9844 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9845 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9848 case BUILT_IN_CONSTANT_P:
9850 tree val = fold_builtin_constant_p (arg0);
9852 /* Gimplification will pull the CALL_EXPR for the builtin out of
9853 an if condition. When not optimizing, we'll not CSE it back.
9854 To avoid link error types of regressions, return false now. */
9855 if (!val && !optimize)
9856 val = integer_zero_node;
9861 case BUILT_IN_CLASSIFY_TYPE:
9862 return fold_builtin_classify_type (arg0);
9864 case BUILT_IN_STRLEN:
9865 return fold_builtin_strlen (loc, type, arg0);
9867 CASE_FLT_FN (BUILT_IN_FABS):
9868 return fold_builtin_fabs (loc, arg0, type);
9872 case BUILT_IN_LLABS:
9873 case BUILT_IN_IMAXABS:
9874 return fold_builtin_abs (loc, arg0, type);
9876 CASE_FLT_FN (BUILT_IN_CONJ):
9877 if (validate_arg (arg0, COMPLEX_TYPE)
9878 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9879 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9882 CASE_FLT_FN (BUILT_IN_CREAL):
9883 if (validate_arg (arg0, COMPLEX_TYPE)
9884 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9885 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9888 CASE_FLT_FN (BUILT_IN_CIMAG):
9889 if (validate_arg (arg0, COMPLEX_TYPE)
9890 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9891 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9894 CASE_FLT_FN (BUILT_IN_CCOS):
9895 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9897 CASE_FLT_FN (BUILT_IN_CCOSH):
9898 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9900 CASE_FLT_FN (BUILT_IN_CPROJ):
9901 return fold_builtin_cproj(loc, arg0, type);
9903 CASE_FLT_FN (BUILT_IN_CSIN):
9904 if (validate_arg (arg0, COMPLEX_TYPE)
9905 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9906 return do_mpc_arg1 (arg0, type, mpc_sin);
9909 CASE_FLT_FN (BUILT_IN_CSINH):
9910 if (validate_arg (arg0, COMPLEX_TYPE)
9911 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9912 return do_mpc_arg1 (arg0, type, mpc_sinh);
9915 CASE_FLT_FN (BUILT_IN_CTAN):
9916 if (validate_arg (arg0, COMPLEX_TYPE)
9917 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9918 return do_mpc_arg1 (arg0, type, mpc_tan);
9921 CASE_FLT_FN (BUILT_IN_CTANH):
9922 if (validate_arg (arg0, COMPLEX_TYPE)
9923 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9924 return do_mpc_arg1 (arg0, type, mpc_tanh);
9927 CASE_FLT_FN (BUILT_IN_CLOG):
9928 if (validate_arg (arg0, COMPLEX_TYPE)
9929 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9930 return do_mpc_arg1 (arg0, type, mpc_log);
9933 CASE_FLT_FN (BUILT_IN_CSQRT):
9934 if (validate_arg (arg0, COMPLEX_TYPE)
9935 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9936 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9939 CASE_FLT_FN (BUILT_IN_CASIN):
9940 if (validate_arg (arg0, COMPLEX_TYPE)
9941 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9942 return do_mpc_arg1 (arg0, type, mpc_asin);
9945 CASE_FLT_FN (BUILT_IN_CACOS):
9946 if (validate_arg (arg0, COMPLEX_TYPE)
9947 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9948 return do_mpc_arg1 (arg0, type, mpc_acos);
9951 CASE_FLT_FN (BUILT_IN_CATAN):
9952 if (validate_arg (arg0, COMPLEX_TYPE)
9953 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9954 return do_mpc_arg1 (arg0, type, mpc_atan);
9957 CASE_FLT_FN (BUILT_IN_CASINH):
9958 if (validate_arg (arg0, COMPLEX_TYPE)
9959 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9960 return do_mpc_arg1 (arg0, type, mpc_asinh);
9963 CASE_FLT_FN (BUILT_IN_CACOSH):
9964 if (validate_arg (arg0, COMPLEX_TYPE)
9965 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9966 return do_mpc_arg1 (arg0, type, mpc_acosh);
9969 CASE_FLT_FN (BUILT_IN_CATANH):
9970 if (validate_arg (arg0, COMPLEX_TYPE)
9971 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9972 return do_mpc_arg1 (arg0, type, mpc_atanh);
9975 CASE_FLT_FN (BUILT_IN_CABS):
9976 return fold_builtin_cabs (loc, arg0, type, fndecl);
9978 CASE_FLT_FN (BUILT_IN_CARG):
9979 return fold_builtin_carg (loc, arg0, type);
9981 CASE_FLT_FN (BUILT_IN_SQRT):
9982 return fold_builtin_sqrt (loc, arg0, type);
9984 CASE_FLT_FN (BUILT_IN_CBRT):
9985 return fold_builtin_cbrt (loc, arg0, type);
9987 CASE_FLT_FN (BUILT_IN_ASIN):
9988 if (validate_arg (arg0, REAL_TYPE))
9989 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9990 &dconstm1, &dconst1, true);
9993 CASE_FLT_FN (BUILT_IN_ACOS):
9994 if (validate_arg (arg0, REAL_TYPE))
9995 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9996 &dconstm1, &dconst1, true);
9999 CASE_FLT_FN (BUILT_IN_ATAN):
10000 if (validate_arg (arg0, REAL_TYPE))
10001 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10004 CASE_FLT_FN (BUILT_IN_ASINH):
10005 if (validate_arg (arg0, REAL_TYPE))
10006 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10009 CASE_FLT_FN (BUILT_IN_ACOSH):
10010 if (validate_arg (arg0, REAL_TYPE))
10011 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10012 &dconst1, NULL, true);
10015 CASE_FLT_FN (BUILT_IN_ATANH):
10016 if (validate_arg (arg0, REAL_TYPE))
10017 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10018 &dconstm1, &dconst1, false);
10021 CASE_FLT_FN (BUILT_IN_SIN):
10022 if (validate_arg (arg0, REAL_TYPE))
10023 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10026 CASE_FLT_FN (BUILT_IN_COS):
10027 return fold_builtin_cos (loc, arg0, type, fndecl);
10029 CASE_FLT_FN (BUILT_IN_TAN):
10030 return fold_builtin_tan (arg0, type);
10032 CASE_FLT_FN (BUILT_IN_CEXP):
10033 return fold_builtin_cexp (loc, arg0, type);
10035 CASE_FLT_FN (BUILT_IN_CEXPI):
10036 if (validate_arg (arg0, REAL_TYPE))
10037 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10040 CASE_FLT_FN (BUILT_IN_SINH):
10041 if (validate_arg (arg0, REAL_TYPE))
10042 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10045 CASE_FLT_FN (BUILT_IN_COSH):
10046 return fold_builtin_cosh (loc, arg0, type, fndecl);
10048 CASE_FLT_FN (BUILT_IN_TANH):
10049 if (validate_arg (arg0, REAL_TYPE))
10050 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10053 CASE_FLT_FN (BUILT_IN_ERF):
10054 if (validate_arg (arg0, REAL_TYPE))
10055 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10058 CASE_FLT_FN (BUILT_IN_ERFC):
10059 if (validate_arg (arg0, REAL_TYPE))
10060 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10063 CASE_FLT_FN (BUILT_IN_TGAMMA):
10064 if (validate_arg (arg0, REAL_TYPE))
10065 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10068 CASE_FLT_FN (BUILT_IN_EXP):
10069 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10071 CASE_FLT_FN (BUILT_IN_EXP2):
10072 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10074 CASE_FLT_FN (BUILT_IN_EXP10):
10075 CASE_FLT_FN (BUILT_IN_POW10):
10076 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10078 CASE_FLT_FN (BUILT_IN_EXPM1):
10079 if (validate_arg (arg0, REAL_TYPE))
10080 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10083 CASE_FLT_FN (BUILT_IN_LOG):
10084 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10086 CASE_FLT_FN (BUILT_IN_LOG2):
10087 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10089 CASE_FLT_FN (BUILT_IN_LOG10):
10090 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10092 CASE_FLT_FN (BUILT_IN_LOG1P):
10093 if (validate_arg (arg0, REAL_TYPE))
10094 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10095 &dconstm1, NULL, false);
10098 CASE_FLT_FN (BUILT_IN_J0):
10099 if (validate_arg (arg0, REAL_TYPE))
10100 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10104 CASE_FLT_FN (BUILT_IN_J1):
10105 if (validate_arg (arg0, REAL_TYPE))
10106 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10110 CASE_FLT_FN (BUILT_IN_Y0):
10111 if (validate_arg (arg0, REAL_TYPE))
10112 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10113 &dconst0, NULL, false);
10116 CASE_FLT_FN (BUILT_IN_Y1):
10117 if (validate_arg (arg0, REAL_TYPE))
10118 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10119 &dconst0, NULL, false);
10122 CASE_FLT_FN (BUILT_IN_NAN):
10123 case BUILT_IN_NAND32:
10124 case BUILT_IN_NAND64:
10125 case BUILT_IN_NAND128:
10126 return fold_builtin_nan (arg0, type, true);
10128 CASE_FLT_FN (BUILT_IN_NANS):
10129 return fold_builtin_nan (arg0, type, false);
10131 CASE_FLT_FN (BUILT_IN_FLOOR):
10132 return fold_builtin_floor (loc, fndecl, arg0);
10134 CASE_FLT_FN (BUILT_IN_CEIL):
10135 return fold_builtin_ceil (loc, fndecl, arg0);
10137 CASE_FLT_FN (BUILT_IN_TRUNC):
10138 return fold_builtin_trunc (loc, fndecl, arg0);
10140 CASE_FLT_FN (BUILT_IN_ROUND):
10141 return fold_builtin_round (loc, fndecl, arg0);
10143 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10144 CASE_FLT_FN (BUILT_IN_RINT):
10145 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10147 CASE_FLT_FN (BUILT_IN_LCEIL):
10148 CASE_FLT_FN (BUILT_IN_LLCEIL):
10149 CASE_FLT_FN (BUILT_IN_LFLOOR):
10150 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10151 CASE_FLT_FN (BUILT_IN_LROUND):
10152 CASE_FLT_FN (BUILT_IN_LLROUND):
10153 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10155 CASE_FLT_FN (BUILT_IN_LRINT):
10156 CASE_FLT_FN (BUILT_IN_LLRINT):
10157 return fold_fixed_mathfn (loc, fndecl, arg0);
10159 case BUILT_IN_BSWAP32:
10160 case BUILT_IN_BSWAP64:
10161 return fold_builtin_bswap (fndecl, arg0);
10163 CASE_INT_FN (BUILT_IN_FFS):
10164 CASE_INT_FN (BUILT_IN_CLZ):
10165 CASE_INT_FN (BUILT_IN_CTZ):
10166 CASE_INT_FN (BUILT_IN_POPCOUNT):
10167 CASE_INT_FN (BUILT_IN_PARITY):
10168 return fold_builtin_bitop (fndecl, arg0);
10170 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10171 return fold_builtin_signbit (loc, arg0, type);
10173 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10174 return fold_builtin_significand (loc, arg0, type);
10176 CASE_FLT_FN (BUILT_IN_ILOGB):
10177 CASE_FLT_FN (BUILT_IN_LOGB):
10178 return fold_builtin_logb (loc, arg0, type);
10180 case BUILT_IN_ISASCII:
10181 return fold_builtin_isascii (loc, arg0);
10183 case BUILT_IN_TOASCII:
10184 return fold_builtin_toascii (loc, arg0);
10186 case BUILT_IN_ISDIGIT:
10187 return fold_builtin_isdigit (loc, arg0);
10189 CASE_FLT_FN (BUILT_IN_FINITE):
10190 case BUILT_IN_FINITED32:
10191 case BUILT_IN_FINITED64:
10192 case BUILT_IN_FINITED128:
10193 case BUILT_IN_ISFINITE:
10195 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10198 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10201 CASE_FLT_FN (BUILT_IN_ISINF):
10202 case BUILT_IN_ISINFD32:
10203 case BUILT_IN_ISINFD64:
10204 case BUILT_IN_ISINFD128:
10206 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10209 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10212 case BUILT_IN_ISNORMAL:
10213 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10215 case BUILT_IN_ISINF_SIGN:
10216 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10218 CASE_FLT_FN (BUILT_IN_ISNAN):
10219 case BUILT_IN_ISNAND32:
10220 case BUILT_IN_ISNAND64:
10221 case BUILT_IN_ISNAND128:
10222 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10224 case BUILT_IN_PRINTF:
10225 case BUILT_IN_PRINTF_UNLOCKED:
10226 case BUILT_IN_VPRINTF:
10227 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10229 case BUILT_IN_FREE:
10230 if (integer_zerop (arg0))
10231 return build_empty_stmt (loc);
10242 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10243 IGNORE is true if the result of the function call is ignored. This
10244 function returns NULL_TREE if no simplification was possible. */
10247 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10249 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10250 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10254 CASE_FLT_FN (BUILT_IN_JN):
10255 if (validate_arg (arg0, INTEGER_TYPE)
10256 && validate_arg (arg1, REAL_TYPE))
10257 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10260 CASE_FLT_FN (BUILT_IN_YN):
10261 if (validate_arg (arg0, INTEGER_TYPE)
10262 && validate_arg (arg1, REAL_TYPE))
10263 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10267 CASE_FLT_FN (BUILT_IN_DREM):
10268 CASE_FLT_FN (BUILT_IN_REMAINDER):
10269 if (validate_arg (arg0, REAL_TYPE)
10270 && validate_arg(arg1, REAL_TYPE))
10271 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10274 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10275 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10276 if (validate_arg (arg0, REAL_TYPE)
10277 && validate_arg(arg1, POINTER_TYPE))
10278 return do_mpfr_lgamma_r (arg0, arg1, type);
10281 CASE_FLT_FN (BUILT_IN_ATAN2):
10282 if (validate_arg (arg0, REAL_TYPE)
10283 && validate_arg(arg1, REAL_TYPE))
10284 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10287 CASE_FLT_FN (BUILT_IN_FDIM):
10288 if (validate_arg (arg0, REAL_TYPE)
10289 && validate_arg(arg1, REAL_TYPE))
10290 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10293 CASE_FLT_FN (BUILT_IN_HYPOT):
10294 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10296 CASE_FLT_FN (BUILT_IN_CPOW):
10297 if (validate_arg (arg0, COMPLEX_TYPE)
10298 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10299 && validate_arg (arg1, COMPLEX_TYPE)
10300 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10301 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10304 CASE_FLT_FN (BUILT_IN_LDEXP):
10305 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10306 CASE_FLT_FN (BUILT_IN_SCALBN):
10307 CASE_FLT_FN (BUILT_IN_SCALBLN):
10308 return fold_builtin_load_exponent (loc, arg0, arg1,
10309 type, /*ldexp=*/false);
10311 CASE_FLT_FN (BUILT_IN_FREXP):
10312 return fold_builtin_frexp (loc, arg0, arg1, type);
10314 CASE_FLT_FN (BUILT_IN_MODF):
10315 return fold_builtin_modf (loc, arg0, arg1, type);
10317 case BUILT_IN_BZERO:
10318 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10320 case BUILT_IN_FPUTS:
10321 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10323 case BUILT_IN_FPUTS_UNLOCKED:
10324 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10326 case BUILT_IN_STRSTR:
10327 return fold_builtin_strstr (loc, arg0, arg1, type);
10329 case BUILT_IN_STRCAT:
10330 return fold_builtin_strcat (loc, arg0, arg1);
10332 case BUILT_IN_STRSPN:
10333 return fold_builtin_strspn (loc, arg0, arg1);
10335 case BUILT_IN_STRCSPN:
10336 return fold_builtin_strcspn (loc, arg0, arg1);
10338 case BUILT_IN_STRCHR:
10339 case BUILT_IN_INDEX:
10340 return fold_builtin_strchr (loc, arg0, arg1, type);
10342 case BUILT_IN_STRRCHR:
10343 case BUILT_IN_RINDEX:
10344 return fold_builtin_strrchr (loc, arg0, arg1, type);
10346 case BUILT_IN_STRCPY:
10347 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10349 case BUILT_IN_STPCPY:
10352 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10356 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10359 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10362 case BUILT_IN_STRCMP:
10363 return fold_builtin_strcmp (loc, arg0, arg1);
10365 case BUILT_IN_STRPBRK:
10366 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10368 case BUILT_IN_EXPECT:
10369 return fold_builtin_expect (loc, arg0, arg1);
10371 CASE_FLT_FN (BUILT_IN_POW):
10372 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10374 CASE_FLT_FN (BUILT_IN_POWI):
10375 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10377 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10378 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10380 CASE_FLT_FN (BUILT_IN_FMIN):
10381 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10383 CASE_FLT_FN (BUILT_IN_FMAX):
10384 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10386 case BUILT_IN_ISGREATER:
10387 return fold_builtin_unordered_cmp (loc, fndecl,
10388 arg0, arg1, UNLE_EXPR, LE_EXPR);
10389 case BUILT_IN_ISGREATEREQUAL:
10390 return fold_builtin_unordered_cmp (loc, fndecl,
10391 arg0, arg1, UNLT_EXPR, LT_EXPR);
10392 case BUILT_IN_ISLESS:
10393 return fold_builtin_unordered_cmp (loc, fndecl,
10394 arg0, arg1, UNGE_EXPR, GE_EXPR);
10395 case BUILT_IN_ISLESSEQUAL:
10396 return fold_builtin_unordered_cmp (loc, fndecl,
10397 arg0, arg1, UNGT_EXPR, GT_EXPR);
10398 case BUILT_IN_ISLESSGREATER:
10399 return fold_builtin_unordered_cmp (loc, fndecl,
10400 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10401 case BUILT_IN_ISUNORDERED:
10402 return fold_builtin_unordered_cmp (loc, fndecl,
10403 arg0, arg1, UNORDERED_EXPR,
10406 /* We do the folding for va_start in the expander. */
10407 case BUILT_IN_VA_START:
10410 case BUILT_IN_SPRINTF:
10411 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10413 case BUILT_IN_OBJECT_SIZE:
10414 return fold_builtin_object_size (arg0, arg1);
10416 case BUILT_IN_PRINTF:
10417 case BUILT_IN_PRINTF_UNLOCKED:
10418 case BUILT_IN_VPRINTF:
10419 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10421 case BUILT_IN_PRINTF_CHK:
10422 case BUILT_IN_VPRINTF_CHK:
10423 if (!validate_arg (arg0, INTEGER_TYPE)
10424 || TREE_SIDE_EFFECTS (arg0))
10427 return fold_builtin_printf (loc, fndecl,
10428 arg1, NULL_TREE, ignore, fcode);
10431 case BUILT_IN_FPRINTF:
10432 case BUILT_IN_FPRINTF_UNLOCKED:
10433 case BUILT_IN_VFPRINTF:
10434 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10443 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10444 and ARG2. IGNORE is true if the result of the function call is ignored.
10445 This function returns NULL_TREE if no simplification was possible. */
10448 fold_builtin_3 (location_t loc, tree fndecl,
10449 tree arg0, tree arg1, tree arg2, bool ignore)
10451 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10452 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10456 CASE_FLT_FN (BUILT_IN_SINCOS):
10457 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10459 CASE_FLT_FN (BUILT_IN_FMA):
10460 if (validate_arg (arg0, REAL_TYPE)
10461 && validate_arg(arg1, REAL_TYPE)
10462 && validate_arg(arg2, REAL_TYPE))
10463 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10466 CASE_FLT_FN (BUILT_IN_REMQUO):
10467 if (validate_arg (arg0, REAL_TYPE)
10468 && validate_arg(arg1, REAL_TYPE)
10469 && validate_arg(arg2, POINTER_TYPE))
10470 return do_mpfr_remquo (arg0, arg1, arg2);
10473 case BUILT_IN_MEMSET:
10474 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10476 case BUILT_IN_BCOPY:
10477 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10478 void_type_node, true, /*endp=*/3);
10480 case BUILT_IN_MEMCPY:
10481 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10482 type, ignore, /*endp=*/0);
10484 case BUILT_IN_MEMPCPY:
10485 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10486 type, ignore, /*endp=*/1);
10488 case BUILT_IN_MEMMOVE:
10489 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10490 type, ignore, /*endp=*/3);
10492 case BUILT_IN_STRNCAT:
10493 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10495 case BUILT_IN_STRNCPY:
10496 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10498 case BUILT_IN_STRNCMP:
10499 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10501 case BUILT_IN_MEMCHR:
10502 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10504 case BUILT_IN_BCMP:
10505 case BUILT_IN_MEMCMP:
10506 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10508 case BUILT_IN_SPRINTF:
10509 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10511 case BUILT_IN_STRCPY_CHK:
10512 case BUILT_IN_STPCPY_CHK:
10513 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10516 case BUILT_IN_STRCAT_CHK:
10517 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10519 case BUILT_IN_PRINTF_CHK:
10520 case BUILT_IN_VPRINTF_CHK:
10521 if (!validate_arg (arg0, INTEGER_TYPE)
10522 || TREE_SIDE_EFFECTS (arg0))
10525 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10528 case BUILT_IN_FPRINTF:
10529 case BUILT_IN_FPRINTF_UNLOCKED:
10530 case BUILT_IN_VFPRINTF:
10531 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10534 case BUILT_IN_FPRINTF_CHK:
10535 case BUILT_IN_VFPRINTF_CHK:
10536 if (!validate_arg (arg1, INTEGER_TYPE)
10537 || TREE_SIDE_EFFECTS (arg1))
10540 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10549 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10550 ARG2, and ARG3. IGNORE is true if the result of the function call is
10551 ignored. This function returns NULL_TREE if no simplification was
10555 fold_builtin_4 (location_t loc, tree fndecl,
10556 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10558 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10562 case BUILT_IN_MEMCPY_CHK:
10563 case BUILT_IN_MEMPCPY_CHK:
10564 case BUILT_IN_MEMMOVE_CHK:
10565 case BUILT_IN_MEMSET_CHK:
10566 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10568 DECL_FUNCTION_CODE (fndecl));
10570 case BUILT_IN_STRNCPY_CHK:
10571 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10573 case BUILT_IN_STRNCAT_CHK:
10574 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10576 case BUILT_IN_FPRINTF_CHK:
10577 case BUILT_IN_VFPRINTF_CHK:
10578 if (!validate_arg (arg1, INTEGER_TYPE)
10579 || TREE_SIDE_EFFECTS (arg1))
10582 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10592 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10593 arguments, where NARGS <= 4. IGNORE is true if the result of the
10594 function call is ignored. This function returns NULL_TREE if no
10595 simplification was possible. Note that this only folds builtins with
10596 fixed argument patterns. Foldings that do varargs-to-varargs
10597 transformations, or that match calls with more than 4 arguments,
10598 need to be handled with fold_builtin_varargs instead. */
10600 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10603 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10605 tree ret = NULL_TREE;
10610 ret = fold_builtin_0 (loc, fndecl, ignore);
10613 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10616 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10619 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10622 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10630 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10631 SET_EXPR_LOCATION (ret, loc);
10632 TREE_NO_WARNING (ret) = 1;
10638 /* Builtins with folding operations that operate on "..." arguments
10639 need special handling; we need to store the arguments in a convenient
10640 data structure before attempting any folding. Fortunately there are
10641 only a few builtins that fall into this category. FNDECL is the
10642 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10643 result of the function call is ignored. */
10646 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10647 bool ignore ATTRIBUTE_UNUSED)
10649 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10650 tree ret = NULL_TREE;
10654 case BUILT_IN_SPRINTF_CHK:
10655 case BUILT_IN_VSPRINTF_CHK:
10656 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10659 case BUILT_IN_SNPRINTF_CHK:
10660 case BUILT_IN_VSNPRINTF_CHK:
10661 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10664 case BUILT_IN_FPCLASSIFY:
10665 ret = fold_builtin_fpclassify (loc, exp);
10673 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10674 SET_EXPR_LOCATION (ret, loc);
10675 TREE_NO_WARNING (ret) = 1;
10681 /* Return true if FNDECL shouldn't be folded right now.
10682 If a built-in function has an inline attribute always_inline
10683 wrapper, defer folding it after always_inline functions have
10684 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10685 might not be performed. */
10688 avoid_folding_inline_builtin (tree fndecl)
10690 return (DECL_DECLARED_INLINE_P (fndecl)
10691 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10693 && !cfun->always_inline_functions_inlined
10694 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10697 /* A wrapper function for builtin folding that prevents warnings for
10698 "statement without effect" and the like, caused by removing the
10699 call node earlier than the warning is generated. */
10702 fold_call_expr (location_t loc, tree exp, bool ignore)
10704 tree ret = NULL_TREE;
10705 tree fndecl = get_callee_fndecl (exp);
10707 && TREE_CODE (fndecl) == FUNCTION_DECL
10708 && DECL_BUILT_IN (fndecl)
10709 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10710 yet. Defer folding until we see all the arguments
10711 (after inlining). */
10712 && !CALL_EXPR_VA_ARG_PACK (exp))
10714 int nargs = call_expr_nargs (exp);
10716 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10717 instead last argument is __builtin_va_arg_pack (). Defer folding
10718 even in that case, until arguments are finalized. */
10719 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10721 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10723 && TREE_CODE (fndecl2) == FUNCTION_DECL
10724 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10725 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10729 if (avoid_folding_inline_builtin (fndecl))
10732 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10733 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10734 CALL_EXPR_ARGP (exp), ignore);
10737 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10739 tree *args = CALL_EXPR_ARGP (exp);
10740 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10743 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10751 /* Conveniently construct a function call expression. FNDECL names the
10752 function to be called and N arguments are passed in the array
10756 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10758 tree fntype = TREE_TYPE (fndecl);
10759 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10761 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10764 /* Conveniently construct a function call expression. FNDECL names the
10765 function to be called and the arguments are passed in the vector
10769 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
10771 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10772 VEC_address (tree, vec));
10776 /* Conveniently construct a function call expression. FNDECL names the
10777 function to be called, N is the number of arguments, and the "..."
10778 parameters are the argument expressions. */
10781 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10784 tree *argarray = XALLOCAVEC (tree, n);
10788 for (i = 0; i < n; i++)
10789 argarray[i] = va_arg (ap, tree);
10791 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10794 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10795 varargs macros aren't supported by all bootstrap compilers. */
10798 build_call_expr (tree fndecl, int n, ...)
10801 tree *argarray = XALLOCAVEC (tree, n);
10805 for (i = 0; i < n; i++)
10806 argarray[i] = va_arg (ap, tree);
10808 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10811 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10812 N arguments are passed in the array ARGARRAY. */
10815 fold_builtin_call_array (location_t loc, tree type,
10820 tree ret = NULL_TREE;
10823 if (TREE_CODE (fn) == ADDR_EXPR)
10825 tree fndecl = TREE_OPERAND (fn, 0);
10826 if (TREE_CODE (fndecl) == FUNCTION_DECL
10827 && DECL_BUILT_IN (fndecl))
10829 /* If last argument is __builtin_va_arg_pack (), arguments to this
10830 function are not finalized yet. Defer folding until they are. */
10831 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10833 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10835 && TREE_CODE (fndecl2) == FUNCTION_DECL
10836 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10837 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10838 return build_call_array_loc (loc, type, fn, n, argarray);
10840 if (avoid_folding_inline_builtin (fndecl))
10841 return build_call_array_loc (loc, type, fn, n, argarray);
10842 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10844 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10848 return build_call_array_loc (loc, type, fn, n, argarray);
10850 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10852 /* First try the transformations that don't require consing up
10854 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10859 /* If we got this far, we need to build an exp. */
10860 exp = build_call_array_loc (loc, type, fn, n, argarray);
10861 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10862 return ret ? ret : exp;
10866 return build_call_array_loc (loc, type, fn, n, argarray);
10869 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10870 along with N new arguments specified as the "..." parameters. SKIP
10871 is the number of arguments in EXP to be omitted. This function is used
10872 to do varargs-to-varargs transformations. */
10875 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10877 int oldnargs = call_expr_nargs (exp);
10878 int nargs = oldnargs - skip + n;
10879 tree fntype = TREE_TYPE (fndecl);
10880 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10888 buffer = XALLOCAVEC (tree, nargs);
10890 for (i = 0; i < n; i++)
10891 buffer[i] = va_arg (ap, tree);
10893 for (j = skip; j < oldnargs; j++, i++)
10894 buffer[i] = CALL_EXPR_ARG (exp, j);
10897 buffer = CALL_EXPR_ARGP (exp) + skip;
10899 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
10902 /* Validate a single argument ARG against a tree code CODE representing
10906 validate_arg (const_tree arg, enum tree_code code)
10910 else if (code == POINTER_TYPE)
10911 return POINTER_TYPE_P (TREE_TYPE (arg));
10912 else if (code == INTEGER_TYPE)
10913 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10914 return code == TREE_CODE (TREE_TYPE (arg));
10917 /* This function validates the types of a function call argument list
10918 against a specified list of tree_codes. If the last specifier is a 0,
10919 that represents an ellipses, otherwise the last specifier must be a
10922 This is the GIMPLE version of validate_arglist. Eventually we want to
10923 completely convert builtins.c to work from GIMPLEs and the tree based
10924 validate_arglist will then be removed. */
10927 validate_gimple_arglist (const_gimple call, ...)
10929 enum tree_code code;
10935 va_start (ap, call);
10940 code = (enum tree_code) va_arg (ap, int);
10944 /* This signifies an ellipses, any further arguments are all ok. */
10948 /* This signifies an endlink, if no arguments remain, return
10949 true, otherwise return false. */
10950 res = (i == gimple_call_num_args (call));
10953 /* If no parameters remain or the parameter's code does not
10954 match the specified code, return false. Otherwise continue
10955 checking any remaining arguments. */
10956 arg = gimple_call_arg (call, i++);
10957 if (!validate_arg (arg, code))
10964 /* We need gotos here since we can only have one VA_CLOSE in a
10972 /* This function validates the types of a function call argument list
10973 against a specified list of tree_codes. If the last specifier is a 0,
10974 that represents an ellipses, otherwise the last specifier must be a
10978 validate_arglist (const_tree callexpr, ...)
10980 enum tree_code code;
10983 const_call_expr_arg_iterator iter;
10986 va_start (ap, callexpr);
10987 init_const_call_expr_arg_iterator (callexpr, &iter);
10991 code = (enum tree_code) va_arg (ap, int);
10995 /* This signifies an ellipses, any further arguments are all ok. */
10999 /* This signifies an endlink, if no arguments remain, return
11000 true, otherwise return false. */
11001 res = !more_const_call_expr_args_p (&iter);
11004 /* If no parameters remain or the parameter's code does not
11005 match the specified code, return false. Otherwise continue
11006 checking any remaining arguments. */
11007 arg = next_const_call_expr_arg (&iter);
11008 if (!validate_arg (arg, code))
11015 /* We need gotos here since we can only have one VA_CLOSE in a
11023 /* Default target-specific builtin expander that does nothing. */
11026 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11027 rtx target ATTRIBUTE_UNUSED,
11028 rtx subtarget ATTRIBUTE_UNUSED,
11029 enum machine_mode mode ATTRIBUTE_UNUSED,
11030 int ignore ATTRIBUTE_UNUSED)
11035 /* Returns true is EXP represents data that would potentially reside
11036 in a readonly section. */
11039 readonly_data_expr (tree exp)
11043 if (TREE_CODE (exp) != ADDR_EXPR)
11046 exp = get_base_address (TREE_OPERAND (exp, 0));
11050 /* Make sure we call decl_readonly_section only for trees it
11051 can handle (since it returns true for everything it doesn't
11053 if (TREE_CODE (exp) == STRING_CST
11054 || TREE_CODE (exp) == CONSTRUCTOR
11055 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11056 return decl_readonly_section (exp, 0);
11061 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11062 to the call, and TYPE is its return type.
11064 Return NULL_TREE if no simplification was possible, otherwise return the
11065 simplified form of the call as a tree.
11067 The simplified form may be a constant or other expression which
11068 computes the same value, but in a more efficient manner (including
11069 calls to other builtin functions).
11071 The call may contain arguments which need to be evaluated, but
11072 which are not useful to determine the result of the call. In
11073 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11074 COMPOUND_EXPR will be an argument which must be evaluated.
11075 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11076 COMPOUND_EXPR in the chain will contain the tree for the simplified
11077 form of the builtin function call. */
11080 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11082 if (!validate_arg (s1, POINTER_TYPE)
11083 || !validate_arg (s2, POINTER_TYPE))
11088 const char *p1, *p2;
11090 p2 = c_getstr (s2);
11094 p1 = c_getstr (s1);
11097 const char *r = strstr (p1, p2);
11101 return build_int_cst (TREE_TYPE (s1), 0);
11103 /* Return an offset into the constant string argument. */
11104 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11105 s1, size_int (r - p1));
11106 return fold_convert_loc (loc, type, tem);
11109 /* The argument is const char *, and the result is char *, so we need
11110 a type conversion here to avoid a warning. */
11112 return fold_convert_loc (loc, type, s1);
11117 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11121 /* New argument list transforming strstr(s1, s2) to
11122 strchr(s1, s2[0]). */
11123 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11127 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11128 the call, and TYPE is its return type.
11130 Return NULL_TREE if no simplification was possible, otherwise return the
11131 simplified form of the call as a tree.
11133 The simplified form may be a constant or other expression which
11134 computes the same value, but in a more efficient manner (including
11135 calls to other builtin functions).
11137 The call may contain arguments which need to be evaluated, but
11138 which are not useful to determine the result of the call. In
11139 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11140 COMPOUND_EXPR will be an argument which must be evaluated.
11141 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11142 COMPOUND_EXPR in the chain will contain the tree for the simplified
11143 form of the builtin function call. */
11146 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11148 if (!validate_arg (s1, POINTER_TYPE)
11149 || !validate_arg (s2, INTEGER_TYPE))
11155 if (TREE_CODE (s2) != INTEGER_CST)
11158 p1 = c_getstr (s1);
11165 if (target_char_cast (s2, &c))
11168 r = strchr (p1, c);
11171 return build_int_cst (TREE_TYPE (s1), 0);
11173 /* Return an offset into the constant string argument. */
11174 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11175 s1, size_int (r - p1));
11176 return fold_convert_loc (loc, type, tem);
11182 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11183 the call, and TYPE is its return type.
11185 Return NULL_TREE if no simplification was possible, otherwise return the
11186 simplified form of the call as a tree.
11188 The simplified form may be a constant or other expression which
11189 computes the same value, but in a more efficient manner (including
11190 calls to other builtin functions).
11192 The call may contain arguments which need to be evaluated, but
11193 which are not useful to determine the result of the call. In
11194 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11195 COMPOUND_EXPR will be an argument which must be evaluated.
11196 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11197 COMPOUND_EXPR in the chain will contain the tree for the simplified
11198 form of the builtin function call. */
11201 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11203 if (!validate_arg (s1, POINTER_TYPE)
11204 || !validate_arg (s2, INTEGER_TYPE))
11211 if (TREE_CODE (s2) != INTEGER_CST)
11214 p1 = c_getstr (s1);
11221 if (target_char_cast (s2, &c))
11224 r = strrchr (p1, c);
11227 return build_int_cst (TREE_TYPE (s1), 0);
11229 /* Return an offset into the constant string argument. */
11230 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11231 s1, size_int (r - p1));
11232 return fold_convert_loc (loc, type, tem);
11235 if (! integer_zerop (s2))
11238 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11242 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11243 return build_call_expr_loc (loc, fn, 2, s1, s2);
11247 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11248 to the call, and TYPE is its return type.
11250 Return NULL_TREE if no simplification was possible, otherwise return the
11251 simplified form of the call as a tree.
11253 The simplified form may be a constant or other expression which
11254 computes the same value, but in a more efficient manner (including
11255 calls to other builtin functions).
11257 The call may contain arguments which need to be evaluated, but
11258 which are not useful to determine the result of the call. In
11259 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11260 COMPOUND_EXPR will be an argument which must be evaluated.
11261 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11262 COMPOUND_EXPR in the chain will contain the tree for the simplified
11263 form of the builtin function call. */
11266 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11268 if (!validate_arg (s1, POINTER_TYPE)
11269 || !validate_arg (s2, POINTER_TYPE))
11274 const char *p1, *p2;
11276 p2 = c_getstr (s2);
11280 p1 = c_getstr (s1);
11283 const char *r = strpbrk (p1, p2);
11287 return build_int_cst (TREE_TYPE (s1), 0);
11289 /* Return an offset into the constant string argument. */
11290 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11291 s1, size_int (r - p1));
11292 return fold_convert_loc (loc, type, tem);
11296 /* strpbrk(x, "") == NULL.
11297 Evaluate and ignore s1 in case it had side-effects. */
11298 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11301 return NULL_TREE; /* Really call strpbrk. */
11303 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11307 /* New argument list transforming strpbrk(s1, s2) to
11308 strchr(s1, s2[0]). */
11309 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11313 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11316 Return NULL_TREE if no simplification was possible, otherwise return the
11317 simplified form of the call as a tree.
11319 The simplified form may be a constant or other expression which
11320 computes the same value, but in a more efficient manner (including
11321 calls to other builtin functions).
11323 The call may contain arguments which need to be evaluated, but
11324 which are not useful to determine the result of the call. In
11325 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11326 COMPOUND_EXPR will be an argument which must be evaluated.
11327 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11328 COMPOUND_EXPR in the chain will contain the tree for the simplified
11329 form of the builtin function call. */
11332 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11334 if (!validate_arg (dst, POINTER_TYPE)
11335 || !validate_arg (src, POINTER_TYPE))
11339 const char *p = c_getstr (src);
11341 /* If the string length is zero, return the dst parameter. */
11342 if (p && *p == '\0')
11345 if (optimize_insn_for_speed_p ())
11347 /* See if we can store by pieces into (dst + strlen(dst)). */
11349 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11350 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11352 if (!strlen_fn || !strcpy_fn)
11355 /* If we don't have a movstr we don't want to emit an strcpy
11356 call. We have to do that if the length of the source string
11357 isn't computable (in that case we can use memcpy probably
11358 later expanding to a sequence of mov instructions). If we
11359 have movstr instructions we can emit strcpy calls. */
11362 tree len = c_strlen (src, 1);
11363 if (! len || TREE_SIDE_EFFECTS (len))
11367 /* Stabilize the argument list. */
11368 dst = builtin_save_expr (dst);
11370 /* Create strlen (dst). */
11371 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11372 /* Create (dst p+ strlen (dst)). */
11374 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11375 TREE_TYPE (dst), dst, newdst);
11376 newdst = builtin_save_expr (newdst);
11378 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11379 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11385 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11386 arguments to the call.
11388 Return NULL_TREE if no simplification was possible, otherwise return the
11389 simplified form of the call as a tree.
11391 The simplified form may be a constant or other expression which
11392 computes the same value, but in a more efficient manner (including
11393 calls to other builtin functions).
11395 The call may contain arguments which need to be evaluated, but
11396 which are not useful to determine the result of the call. In
11397 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11398 COMPOUND_EXPR will be an argument which must be evaluated.
11399 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11400 COMPOUND_EXPR in the chain will contain the tree for the simplified
11401 form of the builtin function call. */
11404 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11406 if (!validate_arg (dst, POINTER_TYPE)
11407 || !validate_arg (src, POINTER_TYPE)
11408 || !validate_arg (len, INTEGER_TYPE))
11412 const char *p = c_getstr (src);
11414 /* If the requested length is zero, or the src parameter string
11415 length is zero, return the dst parameter. */
11416 if (integer_zerop (len) || (p && *p == '\0'))
11417 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11419 /* If the requested len is greater than or equal to the string
11420 length, call strcat. */
11421 if (TREE_CODE (len) == INTEGER_CST && p
11422 && compare_tree_int (len, strlen (p)) >= 0)
11424 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11426 /* If the replacement _DECL isn't initialized, don't do the
11431 return build_call_expr_loc (loc, fn, 2, dst, src);
11437 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11440 Return NULL_TREE if no simplification was possible, otherwise return the
11441 simplified form of the call as a tree.
11443 The simplified form may be a constant or other expression which
11444 computes the same value, but in a more efficient manner (including
11445 calls to other builtin functions).
11447 The call may contain arguments which need to be evaluated, but
11448 which are not useful to determine the result of the call. In
11449 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11450 COMPOUND_EXPR will be an argument which must be evaluated.
11451 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11452 COMPOUND_EXPR in the chain will contain the tree for the simplified
11453 form of the builtin function call. */
11456 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11458 if (!validate_arg (s1, POINTER_TYPE)
11459 || !validate_arg (s2, POINTER_TYPE))
11463 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11465 /* If both arguments are constants, evaluate at compile-time. */
11468 const size_t r = strspn (p1, p2);
11469 return size_int (r);
11472 /* If either argument is "", return NULL_TREE. */
11473 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11474 /* Evaluate and ignore both arguments in case either one has
11476 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11482 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11485 Return NULL_TREE if no simplification was possible, otherwise return the
11486 simplified form of the call as a tree.
11488 The simplified form may be a constant or other expression which
11489 computes the same value, but in a more efficient manner (including
11490 calls to other builtin functions).
11492 The call may contain arguments which need to be evaluated, but
11493 which are not useful to determine the result of the call. In
11494 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11495 COMPOUND_EXPR will be an argument which must be evaluated.
11496 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11497 COMPOUND_EXPR in the chain will contain the tree for the simplified
11498 form of the builtin function call. */
11501 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11503 if (!validate_arg (s1, POINTER_TYPE)
11504 || !validate_arg (s2, POINTER_TYPE))
11508 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11510 /* If both arguments are constants, evaluate at compile-time. */
11513 const size_t r = strcspn (p1, p2);
11514 return size_int (r);
11517 /* If the first argument is "", return NULL_TREE. */
11518 if (p1 && *p1 == '\0')
11520 /* Evaluate and ignore argument s2 in case it has
11522 return omit_one_operand_loc (loc, size_type_node,
11523 size_zero_node, s2);
11526 /* If the second argument is "", return __builtin_strlen(s1). */
11527 if (p2 && *p2 == '\0')
11529 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11531 /* If the replacement _DECL isn't initialized, don't do the
11536 return build_call_expr_loc (loc, fn, 1, s1);
11542 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11543 to the call. IGNORE is true if the value returned
11544 by the builtin will be ignored. UNLOCKED is true is true if this
11545 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11546 the known length of the string. Return NULL_TREE if no simplification
11550 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11551 bool ignore, bool unlocked, tree len)
11553 /* If we're using an unlocked function, assume the other unlocked
11554 functions exist explicitly. */
11555 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11556 : implicit_built_in_decls[BUILT_IN_FPUTC];
11557 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11558 : implicit_built_in_decls[BUILT_IN_FWRITE];
11560 /* If the return value is used, don't do the transformation. */
11564 /* Verify the arguments in the original call. */
11565 if (!validate_arg (arg0, POINTER_TYPE)
11566 || !validate_arg (arg1, POINTER_TYPE))
11570 len = c_strlen (arg0, 0);
11572 /* Get the length of the string passed to fputs. If the length
11573 can't be determined, punt. */
11575 || TREE_CODE (len) != INTEGER_CST)
11578 switch (compare_tree_int (len, 1))
11580 case -1: /* length is 0, delete the call entirely . */
11581 return omit_one_operand_loc (loc, integer_type_node,
11582 integer_zero_node, arg1);;
11584 case 0: /* length is 1, call fputc. */
11586 const char *p = c_getstr (arg0);
11591 return build_call_expr_loc (loc, fn_fputc, 2,
11592 build_int_cst (NULL_TREE, p[0]), arg1);
11598 case 1: /* length is greater than 1, call fwrite. */
11600 /* If optimizing for size keep fputs. */
11601 if (optimize_function_for_size_p (cfun))
11603 /* New argument list transforming fputs(string, stream) to
11604 fwrite(string, 1, len, stream). */
11606 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11607 size_one_node, len, arg1);
11612 gcc_unreachable ();
11617 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11618 produced. False otherwise. This is done so that we don't output the error
11619 or warning twice or three times. */
11622 fold_builtin_next_arg (tree exp, bool va_start_p)
11624 tree fntype = TREE_TYPE (current_function_decl);
11625 int nargs = call_expr_nargs (exp);
11628 if (!stdarg_p (fntype))
11630 error ("%<va_start%> used in function with fixed args");
11636 if (va_start_p && (nargs != 2))
11638 error ("wrong number of arguments to function %<va_start%>");
11641 arg = CALL_EXPR_ARG (exp, 1);
11643 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11644 when we checked the arguments and if needed issued a warning. */
11649 /* Evidently an out of date version of <stdarg.h>; can't validate
11650 va_start's second argument, but can still work as intended. */
11651 warning (0, "%<__builtin_next_arg%> called without an argument");
11654 else if (nargs > 1)
11656 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11659 arg = CALL_EXPR_ARG (exp, 0);
11662 if (TREE_CODE (arg) == SSA_NAME)
11663 arg = SSA_NAME_VAR (arg);
11665 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11666 or __builtin_next_arg (0) the first time we see it, after checking
11667 the arguments and if needed issuing a warning. */
11668 if (!integer_zerop (arg))
11670 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11672 /* Strip off all nops for the sake of the comparison. This
11673 is not quite the same as STRIP_NOPS. It does more.
11674 We must also strip off INDIRECT_EXPR for C++ reference
11676 while (CONVERT_EXPR_P (arg)
11677 || TREE_CODE (arg) == INDIRECT_REF)
11678 arg = TREE_OPERAND (arg, 0);
11679 if (arg != last_parm)
11681 /* FIXME: Sometimes with the tree optimizers we can get the
11682 not the last argument even though the user used the last
11683 argument. We just warn and set the arg to be the last
11684 argument so that we will get wrong-code because of
11686 warning (0, "second parameter of %<va_start%> not last named argument");
11689 /* Undefined by C99 7.15.1.4p4 (va_start):
11690 "If the parameter parmN is declared with the register storage
11691 class, with a function or array type, or with a type that is
11692 not compatible with the type that results after application of
11693 the default argument promotions, the behavior is undefined."
11695 else if (DECL_REGISTER (arg))
11696 warning (0, "undefined behaviour when second parameter of "
11697 "%<va_start%> is declared with %<register%> storage");
11699 /* We want to verify the second parameter just once before the tree
11700 optimizers are run and then avoid keeping it in the tree,
11701 as otherwise we could warn even for correct code like:
11702 void foo (int i, ...)
11703 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11705 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11707 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11713 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11714 ORIG may be null if this is a 2-argument call. We don't attempt to
11715 simplify calls with more than 3 arguments.
11717 Return NULL_TREE if no simplification was possible, otherwise return the
11718 simplified form of the call as a tree. If IGNORED is true, it means that
11719 the caller does not use the returned value of the function. */
11722 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11723 tree orig, int ignored)
11726 const char *fmt_str = NULL;
11728 /* Verify the required arguments in the original call. We deal with two
11729 types of sprintf() calls: 'sprintf (str, fmt)' and
11730 'sprintf (dest, "%s", orig)'. */
11731 if (!validate_arg (dest, POINTER_TYPE)
11732 || !validate_arg (fmt, POINTER_TYPE))
11734 if (orig && !validate_arg (orig, POINTER_TYPE))
11737 /* Check whether the format is a literal string constant. */
11738 fmt_str = c_getstr (fmt);
11739 if (fmt_str == NULL)
11743 retval = NULL_TREE;
11745 if (!init_target_chars ())
11748 /* If the format doesn't contain % args or %%, use strcpy. */
11749 if (strchr (fmt_str, target_percent) == NULL)
11751 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11756 /* Don't optimize sprintf (buf, "abc", ptr++). */
11760 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11761 'format' is known to contain no % formats. */
11762 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11764 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11767 /* If the format is "%s", use strcpy if the result isn't used. */
11768 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11771 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11776 /* Don't crash on sprintf (str1, "%s"). */
11780 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11783 retval = c_strlen (orig, 1);
11784 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11787 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11790 if (call && retval)
11792 retval = fold_convert_loc
11793 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11795 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11801 /* Expand a call EXP to __builtin_object_size. */
11804 expand_builtin_object_size (tree exp)
11807 int object_size_type;
11808 tree fndecl = get_callee_fndecl (exp);
11810 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11812 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11814 expand_builtin_trap ();
11818 ost = CALL_EXPR_ARG (exp, 1);
11821 if (TREE_CODE (ost) != INTEGER_CST
11822 || tree_int_cst_sgn (ost) < 0
11823 || compare_tree_int (ost, 3) > 0)
11825 error ("%Klast argument of %D is not integer constant between 0 and 3",
11827 expand_builtin_trap ();
11831 object_size_type = tree_low_cst (ost, 0);
11833 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11836 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11837 FCODE is the BUILT_IN_* to use.
11838 Return NULL_RTX if we failed; the caller should emit a normal call,
11839 otherwise try to get the result in TARGET, if convenient (and in
11840 mode MODE if that's convenient). */
11843 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11844 enum built_in_function fcode)
11846 tree dest, src, len, size;
11848 if (!validate_arglist (exp,
11850 fcode == BUILT_IN_MEMSET_CHK
11851 ? INTEGER_TYPE : POINTER_TYPE,
11852 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11855 dest = CALL_EXPR_ARG (exp, 0);
11856 src = CALL_EXPR_ARG (exp, 1);
11857 len = CALL_EXPR_ARG (exp, 2);
11858 size = CALL_EXPR_ARG (exp, 3);
11860 if (! host_integerp (size, 1))
11863 if (host_integerp (len, 1) || integer_all_onesp (size))
11867 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11869 warning_at (tree_nonartificial_location (exp),
11870 0, "%Kcall to %D will always overflow destination buffer",
11871 exp, get_callee_fndecl (exp));
11876 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11877 mem{cpy,pcpy,move,set} is available. */
11880 case BUILT_IN_MEMCPY_CHK:
11881 fn = built_in_decls[BUILT_IN_MEMCPY];
11883 case BUILT_IN_MEMPCPY_CHK:
11884 fn = built_in_decls[BUILT_IN_MEMPCPY];
11886 case BUILT_IN_MEMMOVE_CHK:
11887 fn = built_in_decls[BUILT_IN_MEMMOVE];
11889 case BUILT_IN_MEMSET_CHK:
11890 fn = built_in_decls[BUILT_IN_MEMSET];
11899 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11900 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11901 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11902 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11904 else if (fcode == BUILT_IN_MEMSET_CHK)
11908 unsigned int dest_align
11909 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11911 /* If DEST is not a pointer type, call the normal function. */
11912 if (dest_align == 0)
11915 /* If SRC and DEST are the same (and not volatile), do nothing. */
11916 if (operand_equal_p (src, dest, 0))
11920 if (fcode != BUILT_IN_MEMPCPY_CHK)
11922 /* Evaluate and ignore LEN in case it has side-effects. */
11923 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11924 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11927 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11928 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11931 /* __memmove_chk special case. */
11932 if (fcode == BUILT_IN_MEMMOVE_CHK)
11934 unsigned int src_align
11935 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11937 if (src_align == 0)
11940 /* If src is categorized for a readonly section we can use
11941 normal __memcpy_chk. */
11942 if (readonly_data_expr (src))
11944 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11947 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11948 dest, src, len, size);
11949 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11950 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11951 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11958 /* Emit warning if a buffer overflow is detected at compile time. */
11961 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11965 location_t loc = tree_nonartificial_location (exp);
11969 case BUILT_IN_STRCPY_CHK:
11970 case BUILT_IN_STPCPY_CHK:
11971 /* For __strcat_chk the warning will be emitted only if overflowing
11972 by at least strlen (dest) + 1 bytes. */
11973 case BUILT_IN_STRCAT_CHK:
11974 len = CALL_EXPR_ARG (exp, 1);
11975 size = CALL_EXPR_ARG (exp, 2);
11978 case BUILT_IN_STRNCAT_CHK:
11979 case BUILT_IN_STRNCPY_CHK:
11980 len = CALL_EXPR_ARG (exp, 2);
11981 size = CALL_EXPR_ARG (exp, 3);
11983 case BUILT_IN_SNPRINTF_CHK:
11984 case BUILT_IN_VSNPRINTF_CHK:
11985 len = CALL_EXPR_ARG (exp, 1);
11986 size = CALL_EXPR_ARG (exp, 3);
11989 gcc_unreachable ();
11995 if (! host_integerp (size, 1) || integer_all_onesp (size))
12000 len = c_strlen (len, 1);
12001 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12004 else if (fcode == BUILT_IN_STRNCAT_CHK)
12006 tree src = CALL_EXPR_ARG (exp, 1);
12007 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12009 src = c_strlen (src, 1);
12010 if (! src || ! host_integerp (src, 1))
12012 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12013 exp, get_callee_fndecl (exp));
12016 else if (tree_int_cst_lt (src, size))
12019 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12022 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12023 exp, get_callee_fndecl (exp));
12026 /* Emit warning if a buffer overflow is detected at compile time
12027 in __sprintf_chk/__vsprintf_chk calls. */
12030 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12032 tree size, len, fmt;
12033 const char *fmt_str;
12034 int nargs = call_expr_nargs (exp);
12036 /* Verify the required arguments in the original call. */
12040 size = CALL_EXPR_ARG (exp, 2);
12041 fmt = CALL_EXPR_ARG (exp, 3);
12043 if (! host_integerp (size, 1) || integer_all_onesp (size))
12046 /* Check whether the format is a literal string constant. */
12047 fmt_str = c_getstr (fmt);
12048 if (fmt_str == NULL)
12051 if (!init_target_chars ())
12054 /* If the format doesn't contain % args or %%, we know its size. */
12055 if (strchr (fmt_str, target_percent) == 0)
12056 len = build_int_cstu (size_type_node, strlen (fmt_str));
12057 /* If the format is "%s" and first ... argument is a string literal,
12059 else if (fcode == BUILT_IN_SPRINTF_CHK
12060 && strcmp (fmt_str, target_percent_s) == 0)
12066 arg = CALL_EXPR_ARG (exp, 4);
12067 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12070 len = c_strlen (arg, 1);
12071 if (!len || ! host_integerp (len, 1))
12077 if (! tree_int_cst_lt (len, size))
12078 warning_at (tree_nonartificial_location (exp),
12079 0, "%Kcall to %D will always overflow destination buffer",
12080 exp, get_callee_fndecl (exp));
12083 /* Emit warning if a free is called with address of a variable. */
12086 maybe_emit_free_warning (tree exp)
12088 tree arg = CALL_EXPR_ARG (exp, 0);
12091 if (TREE_CODE (arg) != ADDR_EXPR)
12094 arg = get_base_address (TREE_OPERAND (arg, 0));
12095 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12098 if (SSA_VAR_P (arg))
12099 warning_at (tree_nonartificial_location (exp),
12100 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12102 warning_at (tree_nonartificial_location (exp),
12103 0, "%Kattempt to free a non-heap object", exp);
12106 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12110 fold_builtin_object_size (tree ptr, tree ost)
12112 unsigned HOST_WIDE_INT bytes;
12113 int object_size_type;
12115 if (!validate_arg (ptr, POINTER_TYPE)
12116 || !validate_arg (ost, INTEGER_TYPE))
12121 if (TREE_CODE (ost) != INTEGER_CST
12122 || tree_int_cst_sgn (ost) < 0
12123 || compare_tree_int (ost, 3) > 0)
12126 object_size_type = tree_low_cst (ost, 0);
12128 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12129 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12130 and (size_t) 0 for types 2 and 3. */
12131 if (TREE_SIDE_EFFECTS (ptr))
12132 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12134 if (TREE_CODE (ptr) == ADDR_EXPR)
12136 bytes = compute_builtin_object_size (ptr, object_size_type);
12137 if (double_int_fits_to_tree_p (size_type_node,
12138 uhwi_to_double_int (bytes)))
12139 return build_int_cstu (size_type_node, bytes);
12141 else if (TREE_CODE (ptr) == SSA_NAME)
12143 /* If object size is not known yet, delay folding until
12144 later. Maybe subsequent passes will help determining
12146 bytes = compute_builtin_object_size (ptr, object_size_type);
12147 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12148 && double_int_fits_to_tree_p (size_type_node,
12149 uhwi_to_double_int (bytes)))
12150 return build_int_cstu (size_type_node, bytes);
12156 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12157 DEST, SRC, LEN, and SIZE are the arguments to the call.
12158 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12159 code of the builtin. If MAXLEN is not NULL, it is maximum length
12160 passed as third argument. */
12163 fold_builtin_memory_chk (location_t loc, tree fndecl,
12164 tree dest, tree src, tree len, tree size,
12165 tree maxlen, bool ignore,
12166 enum built_in_function fcode)
12170 if (!validate_arg (dest, POINTER_TYPE)
12171 || !validate_arg (src,
12172 (fcode == BUILT_IN_MEMSET_CHK
12173 ? INTEGER_TYPE : POINTER_TYPE))
12174 || !validate_arg (len, INTEGER_TYPE)
12175 || !validate_arg (size, INTEGER_TYPE))
12178 /* If SRC and DEST are the same (and not volatile), return DEST
12179 (resp. DEST+LEN for __mempcpy_chk). */
12180 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12182 if (fcode != BUILT_IN_MEMPCPY_CHK)
12183 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12187 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12189 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12193 if (! host_integerp (size, 1))
12196 if (! integer_all_onesp (size))
12198 if (! host_integerp (len, 1))
12200 /* If LEN is not constant, try MAXLEN too.
12201 For MAXLEN only allow optimizing into non-_ocs function
12202 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12203 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12205 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12207 /* (void) __mempcpy_chk () can be optimized into
12208 (void) __memcpy_chk (). */
12209 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12213 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12221 if (tree_int_cst_lt (size, maxlen))
12226 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12227 mem{cpy,pcpy,move,set} is available. */
12230 case BUILT_IN_MEMCPY_CHK:
12231 fn = built_in_decls[BUILT_IN_MEMCPY];
12233 case BUILT_IN_MEMPCPY_CHK:
12234 fn = built_in_decls[BUILT_IN_MEMPCPY];
12236 case BUILT_IN_MEMMOVE_CHK:
12237 fn = built_in_decls[BUILT_IN_MEMMOVE];
12239 case BUILT_IN_MEMSET_CHK:
12240 fn = built_in_decls[BUILT_IN_MEMSET];
12249 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12252 /* Fold a call to the __st[rp]cpy_chk builtin.
12253 DEST, SRC, and SIZE are the arguments to the call.
12254 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12255 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12256 strings passed as second argument. */
12259 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12260 tree src, tree size,
12261 tree maxlen, bool ignore,
12262 enum built_in_function fcode)
12266 if (!validate_arg (dest, POINTER_TYPE)
12267 || !validate_arg (src, POINTER_TYPE)
12268 || !validate_arg (size, INTEGER_TYPE))
12271 /* If SRC and DEST are the same (and not volatile), return DEST. */
12272 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12273 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12275 if (! host_integerp (size, 1))
12278 if (! integer_all_onesp (size))
12280 len = c_strlen (src, 1);
12281 if (! len || ! host_integerp (len, 1))
12283 /* If LEN is not constant, try MAXLEN too.
12284 For MAXLEN only allow optimizing into non-_ocs function
12285 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12286 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12288 if (fcode == BUILT_IN_STPCPY_CHK)
12293 /* If return value of __stpcpy_chk is ignored,
12294 optimize into __strcpy_chk. */
12295 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12299 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12302 if (! len || TREE_SIDE_EFFECTS (len))
12305 /* If c_strlen returned something, but not a constant,
12306 transform __strcpy_chk into __memcpy_chk. */
12307 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12311 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12312 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12313 build_call_expr_loc (loc, fn, 4,
12314 dest, src, len, size));
12320 if (! tree_int_cst_lt (maxlen, size))
12324 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12325 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12326 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12330 return build_call_expr_loc (loc, fn, 2, dest, src);
12333 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12334 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12335 length passed as third argument. */
12338 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12339 tree len, tree size, tree maxlen)
12343 if (!validate_arg (dest, POINTER_TYPE)
12344 || !validate_arg (src, POINTER_TYPE)
12345 || !validate_arg (len, INTEGER_TYPE)
12346 || !validate_arg (size, INTEGER_TYPE))
12349 if (! host_integerp (size, 1))
12352 if (! integer_all_onesp (size))
12354 if (! host_integerp (len, 1))
12356 /* If LEN is not constant, try MAXLEN too.
12357 For MAXLEN only allow optimizing into non-_ocs function
12358 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12359 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12365 if (tree_int_cst_lt (size, maxlen))
12369 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12370 fn = built_in_decls[BUILT_IN_STRNCPY];
12374 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12377 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12378 are the arguments to the call. */
12381 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12382 tree src, tree size)
12387 if (!validate_arg (dest, POINTER_TYPE)
12388 || !validate_arg (src, POINTER_TYPE)
12389 || !validate_arg (size, INTEGER_TYPE))
12392 p = c_getstr (src);
12393 /* If the SRC parameter is "", return DEST. */
12394 if (p && *p == '\0')
12395 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12397 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12400 /* If __builtin_strcat_chk is used, assume strcat is available. */
12401 fn = built_in_decls[BUILT_IN_STRCAT];
12405 return build_call_expr_loc (loc, fn, 2, dest, src);
12408 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12412 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12413 tree dest, tree src, tree len, tree size)
12418 if (!validate_arg (dest, POINTER_TYPE)
12419 || !validate_arg (src, POINTER_TYPE)
12420 || !validate_arg (size, INTEGER_TYPE)
12421 || !validate_arg (size, INTEGER_TYPE))
12424 p = c_getstr (src);
12425 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12426 if (p && *p == '\0')
12427 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12428 else if (integer_zerop (len))
12429 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12431 if (! host_integerp (size, 1))
12434 if (! integer_all_onesp (size))
12436 tree src_len = c_strlen (src, 1);
12438 && host_integerp (src_len, 1)
12439 && host_integerp (len, 1)
12440 && ! tree_int_cst_lt (len, src_len))
12442 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12443 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12447 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12452 /* If __builtin_strncat_chk is used, assume strncat is available. */
12453 fn = built_in_decls[BUILT_IN_STRNCAT];
12457 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12460 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12461 a normal call should be emitted rather than expanding the function
12462 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12465 fold_builtin_sprintf_chk (location_t loc, tree exp,
12466 enum built_in_function fcode)
12468 tree dest, size, len, fn, fmt, flag;
12469 const char *fmt_str;
12470 int nargs = call_expr_nargs (exp);
12472 /* Verify the required arguments in the original call. */
12475 dest = CALL_EXPR_ARG (exp, 0);
12476 if (!validate_arg (dest, POINTER_TYPE))
12478 flag = CALL_EXPR_ARG (exp, 1);
12479 if (!validate_arg (flag, INTEGER_TYPE))
12481 size = CALL_EXPR_ARG (exp, 2);
12482 if (!validate_arg (size, INTEGER_TYPE))
12484 fmt = CALL_EXPR_ARG (exp, 3);
12485 if (!validate_arg (fmt, POINTER_TYPE))
12488 if (! host_integerp (size, 1))
12493 if (!init_target_chars ())
12496 /* Check whether the format is a literal string constant. */
12497 fmt_str = c_getstr (fmt);
12498 if (fmt_str != NULL)
12500 /* If the format doesn't contain % args or %%, we know the size. */
12501 if (strchr (fmt_str, target_percent) == 0)
12503 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12504 len = build_int_cstu (size_type_node, strlen (fmt_str));
12506 /* If the format is "%s" and first ... argument is a string literal,
12507 we know the size too. */
12508 else if (fcode == BUILT_IN_SPRINTF_CHK
12509 && strcmp (fmt_str, target_percent_s) == 0)
12515 arg = CALL_EXPR_ARG (exp, 4);
12516 if (validate_arg (arg, POINTER_TYPE))
12518 len = c_strlen (arg, 1);
12519 if (! len || ! host_integerp (len, 1))
12526 if (! integer_all_onesp (size))
12528 if (! len || ! tree_int_cst_lt (len, size))
12532 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12533 or if format doesn't contain % chars or is "%s". */
12534 if (! integer_zerop (flag))
12536 if (fmt_str == NULL)
12538 if (strchr (fmt_str, target_percent) != NULL
12539 && strcmp (fmt_str, target_percent_s))
12543 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12544 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12545 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12549 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12552 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12553 a normal call should be emitted rather than expanding the function
12554 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12555 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12556 passed as second argument. */
12559 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12560 enum built_in_function fcode)
12562 tree dest, size, len, fn, fmt, flag;
12563 const char *fmt_str;
12565 /* Verify the required arguments in the original call. */
12566 if (call_expr_nargs (exp) < 5)
12568 dest = CALL_EXPR_ARG (exp, 0);
12569 if (!validate_arg (dest, POINTER_TYPE))
12571 len = CALL_EXPR_ARG (exp, 1);
12572 if (!validate_arg (len, INTEGER_TYPE))
12574 flag = CALL_EXPR_ARG (exp, 2);
12575 if (!validate_arg (flag, INTEGER_TYPE))
12577 size = CALL_EXPR_ARG (exp, 3);
12578 if (!validate_arg (size, INTEGER_TYPE))
12580 fmt = CALL_EXPR_ARG (exp, 4);
12581 if (!validate_arg (fmt, POINTER_TYPE))
12584 if (! host_integerp (size, 1))
12587 if (! integer_all_onesp (size))
12589 if (! host_integerp (len, 1))
12591 /* If LEN is not constant, try MAXLEN too.
12592 For MAXLEN only allow optimizing into non-_ocs function
12593 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12594 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12600 if (tree_int_cst_lt (size, maxlen))
12604 if (!init_target_chars ())
12607 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12608 or if format doesn't contain % chars or is "%s". */
12609 if (! integer_zerop (flag))
12611 fmt_str = c_getstr (fmt);
12612 if (fmt_str == NULL)
12614 if (strchr (fmt_str, target_percent) != NULL
12615 && strcmp (fmt_str, target_percent_s))
12619 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12621 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12622 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12626 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12629 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12630 FMT and ARG are the arguments to the call; we don't fold cases with
12631 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12633 Return NULL_TREE if no simplification was possible, otherwise return the
12634 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12635 code of the function to be simplified. */
12638 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12639 tree arg, bool ignore,
12640 enum built_in_function fcode)
12642 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12643 const char *fmt_str = NULL;
12645 /* If the return value is used, don't do the transformation. */
12649 /* Verify the required arguments in the original call. */
12650 if (!validate_arg (fmt, POINTER_TYPE))
12653 /* Check whether the format is a literal string constant. */
12654 fmt_str = c_getstr (fmt);
12655 if (fmt_str == NULL)
12658 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12660 /* If we're using an unlocked function, assume the other
12661 unlocked functions exist explicitly. */
12662 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12663 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12667 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12668 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12671 if (!init_target_chars ())
12674 if (strcmp (fmt_str, target_percent_s) == 0
12675 || strchr (fmt_str, target_percent) == NULL)
12679 if (strcmp (fmt_str, target_percent_s) == 0)
12681 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12684 if (!arg || !validate_arg (arg, POINTER_TYPE))
12687 str = c_getstr (arg);
12693 /* The format specifier doesn't contain any '%' characters. */
12694 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12700 /* If the string was "", printf does nothing. */
12701 if (str[0] == '\0')
12702 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12704 /* If the string has length of 1, call putchar. */
12705 if (str[1] == '\0')
12707 /* Given printf("c"), (where c is any one character,)
12708 convert "c"[0] to an int and pass that to the replacement
12710 newarg = build_int_cst (NULL_TREE, str[0]);
12712 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12716 /* If the string was "string\n", call puts("string"). */
12717 size_t len = strlen (str);
12718 if ((unsigned char)str[len - 1] == target_newline)
12720 /* Create a NUL-terminated string that's one char shorter
12721 than the original, stripping off the trailing '\n'. */
12722 char *newstr = XALLOCAVEC (char, len);
12723 memcpy (newstr, str, len - 1);
12724 newstr[len - 1] = 0;
12726 newarg = build_string_literal (len, newstr);
12728 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12731 /* We'd like to arrange to call fputs(string,stdout) here,
12732 but we need stdout and don't have a way to get it yet. */
12737 /* The other optimizations can be done only on the non-va_list variants. */
12738 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12741 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12742 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12744 if (!arg || !validate_arg (arg, POINTER_TYPE))
12747 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12750 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12751 else if (strcmp (fmt_str, target_percent_c) == 0)
12753 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12756 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12762 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12765 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12766 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12767 more than 3 arguments, and ARG may be null in the 2-argument case.
12769 Return NULL_TREE if no simplification was possible, otherwise return the
12770 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12771 code of the function to be simplified. */
12774 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12775 tree fmt, tree arg, bool ignore,
12776 enum built_in_function fcode)
12778 tree fn_fputc, fn_fputs, call = NULL_TREE;
12779 const char *fmt_str = NULL;
12781 /* If the return value is used, don't do the transformation. */
12785 /* Verify the required arguments in the original call. */
12786 if (!validate_arg (fp, POINTER_TYPE))
12788 if (!validate_arg (fmt, POINTER_TYPE))
12791 /* Check whether the format is a literal string constant. */
12792 fmt_str = c_getstr (fmt);
12793 if (fmt_str == NULL)
12796 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12798 /* If we're using an unlocked function, assume the other
12799 unlocked functions exist explicitly. */
12800 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12801 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12805 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12806 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12809 if (!init_target_chars ())
12812 /* If the format doesn't contain % args or %%, use strcpy. */
12813 if (strchr (fmt_str, target_percent) == NULL)
12815 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12819 /* If the format specifier was "", fprintf does nothing. */
12820 if (fmt_str[0] == '\0')
12822 /* If FP has side-effects, just wait until gimplification is
12824 if (TREE_SIDE_EFFECTS (fp))
12827 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12830 /* When "string" doesn't contain %, replace all cases of
12831 fprintf (fp, string) with fputs (string, fp). The fputs
12832 builtin will take care of special cases like length == 1. */
12834 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12837 /* The other optimizations can be done only on the non-va_list variants. */
12838 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12841 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12842 else if (strcmp (fmt_str, target_percent_s) == 0)
12844 if (!arg || !validate_arg (arg, POINTER_TYPE))
12847 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12850 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12851 else if (strcmp (fmt_str, target_percent_c) == 0)
12853 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12856 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12861 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12864 /* Initialize format string characters in the target charset. */
12867 init_target_chars (void)
12872 target_newline = lang_hooks.to_target_charset ('\n');
12873 target_percent = lang_hooks.to_target_charset ('%');
12874 target_c = lang_hooks.to_target_charset ('c');
12875 target_s = lang_hooks.to_target_charset ('s');
12876 if (target_newline == 0 || target_percent == 0 || target_c == 0
12880 target_percent_c[0] = target_percent;
12881 target_percent_c[1] = target_c;
12882 target_percent_c[2] = '\0';
12884 target_percent_s[0] = target_percent;
12885 target_percent_s[1] = target_s;
12886 target_percent_s[2] = '\0';
12888 target_percent_s_newline[0] = target_percent;
12889 target_percent_s_newline[1] = target_s;
12890 target_percent_s_newline[2] = target_newline;
12891 target_percent_s_newline[3] = '\0';
12898 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12899 and no overflow/underflow occurred. INEXACT is true if M was not
12900 exactly calculated. TYPE is the tree type for the result. This
12901 function assumes that you cleared the MPFR flags and then
12902 calculated M to see if anything subsequently set a flag prior to
12903 entering this function. Return NULL_TREE if any checks fail. */
12906 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12908 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12909 overflow/underflow occurred. If -frounding-math, proceed iff the
12910 result of calling FUNC was exact. */
12911 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12912 && (!flag_rounding_math || !inexact))
12914 REAL_VALUE_TYPE rr;
12916 real_from_mpfr (&rr, m, type, GMP_RNDN);
12917 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12918 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12919 but the mpft_t is not, then we underflowed in the
12921 if (real_isfinite (&rr)
12922 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12924 REAL_VALUE_TYPE rmode;
12926 real_convert (&rmode, TYPE_MODE (type), &rr);
12927 /* Proceed iff the specified mode can hold the value. */
12928 if (real_identical (&rmode, &rr))
12929 return build_real (type, rmode);
12935 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12936 number and no overflow/underflow occurred. INEXACT is true if M
12937 was not exactly calculated. TYPE is the tree type for the result.
12938 This function assumes that you cleared the MPFR flags and then
12939 calculated M to see if anything subsequently set a flag prior to
12940 entering this function. Return NULL_TREE if any checks fail, if
12941 FORCE_CONVERT is true, then bypass the checks. */
12944 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12946 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12947 overflow/underflow occurred. If -frounding-math, proceed iff the
12948 result of calling FUNC was exact. */
12950 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12951 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12952 && (!flag_rounding_math || !inexact)))
12954 REAL_VALUE_TYPE re, im;
12956 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12957 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12958 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12959 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12960 but the mpft_t is not, then we underflowed in the
12963 || (real_isfinite (&re) && real_isfinite (&im)
12964 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12965 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12967 REAL_VALUE_TYPE re_mode, im_mode;
12969 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12970 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12971 /* Proceed iff the specified mode can hold the value. */
12973 || (real_identical (&re_mode, &re)
12974 && real_identical (&im_mode, &im)))
12975 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12976 build_real (TREE_TYPE (type), im_mode));
12982 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12983 FUNC on it and return the resulting value as a tree with type TYPE.
12984 If MIN and/or MAX are not NULL, then the supplied ARG must be
12985 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12986 acceptable values, otherwise they are not. The mpfr precision is
12987 set to the precision of TYPE. We assume that function FUNC returns
12988 zero if the result could be calculated exactly within the requested
12992 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12993 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12996 tree result = NULL_TREE;
13000 /* To proceed, MPFR must exactly represent the target floating point
13001 format, which only happens when the target base equals two. */
13002 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13003 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13005 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13007 if (real_isfinite (ra)
13008 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13009 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13011 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13012 const int prec = fmt->p;
13013 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13017 mpfr_init2 (m, prec);
13018 mpfr_from_real (m, ra, GMP_RNDN);
13019 mpfr_clear_flags ();
13020 inexact = func (m, m, rnd);
13021 result = do_mpfr_ckconv (m, type, inexact);
13029 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13030 FUNC on it and return the resulting value as a tree with type TYPE.
13031 The mpfr precision is set to the precision of TYPE. We assume that
13032 function FUNC returns zero if the result could be calculated
13033 exactly within the requested precision. */
13036 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13037 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13039 tree result = NULL_TREE;
13044 /* To proceed, MPFR must exactly represent the target floating point
13045 format, which only happens when the target base equals two. */
13046 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13047 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13048 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13050 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13051 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13053 if (real_isfinite (ra1) && real_isfinite (ra2))
13055 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13056 const int prec = fmt->p;
13057 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13061 mpfr_inits2 (prec, m1, m2, NULL);
13062 mpfr_from_real (m1, ra1, GMP_RNDN);
13063 mpfr_from_real (m2, ra2, GMP_RNDN);
13064 mpfr_clear_flags ();
13065 inexact = func (m1, m1, m2, rnd);
13066 result = do_mpfr_ckconv (m1, type, inexact);
13067 mpfr_clears (m1, m2, NULL);
13074 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13075 FUNC on it and return the resulting value as a tree with type TYPE.
13076 The mpfr precision is set to the precision of TYPE. We assume that
13077 function FUNC returns zero if the result could be calculated
13078 exactly within the requested precision. */
13081 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13082 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13084 tree result = NULL_TREE;
13090 /* To proceed, MPFR must exactly represent the target floating point
13091 format, which only happens when the target base equals two. */
13092 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13093 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13094 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13095 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13097 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13098 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13099 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13101 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13103 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13104 const int prec = fmt->p;
13105 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13109 mpfr_inits2 (prec, m1, m2, m3, NULL);
13110 mpfr_from_real (m1, ra1, GMP_RNDN);
13111 mpfr_from_real (m2, ra2, GMP_RNDN);
13112 mpfr_from_real (m3, ra3, GMP_RNDN);
13113 mpfr_clear_flags ();
13114 inexact = func (m1, m1, m2, m3, rnd);
13115 result = do_mpfr_ckconv (m1, type, inexact);
13116 mpfr_clears (m1, m2, m3, NULL);
13123 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13124 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13125 If ARG_SINP and ARG_COSP are NULL then the result is returned
13126 as a complex value.
13127 The type is taken from the type of ARG and is used for setting the
13128 precision of the calculation and results. */
13131 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13133 tree const type = TREE_TYPE (arg);
13134 tree result = NULL_TREE;
13138 /* To proceed, MPFR must exactly represent the target floating point
13139 format, which only happens when the target base equals two. */
13140 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13141 && TREE_CODE (arg) == REAL_CST
13142 && !TREE_OVERFLOW (arg))
13144 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13146 if (real_isfinite (ra))
13148 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13149 const int prec = fmt->p;
13150 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13151 tree result_s, result_c;
13155 mpfr_inits2 (prec, m, ms, mc, NULL);
13156 mpfr_from_real (m, ra, GMP_RNDN);
13157 mpfr_clear_flags ();
13158 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13159 result_s = do_mpfr_ckconv (ms, type, inexact);
13160 result_c = do_mpfr_ckconv (mc, type, inexact);
13161 mpfr_clears (m, ms, mc, NULL);
13162 if (result_s && result_c)
13164 /* If we are to return in a complex value do so. */
13165 if (!arg_sinp && !arg_cosp)
13166 return build_complex (build_complex_type (type),
13167 result_c, result_s);
13169 /* Dereference the sin/cos pointer arguments. */
13170 arg_sinp = build_fold_indirect_ref (arg_sinp);
13171 arg_cosp = build_fold_indirect_ref (arg_cosp);
13172 /* Proceed if valid pointer type were passed in. */
13173 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13174 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13176 /* Set the values. */
13177 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13179 TREE_SIDE_EFFECTS (result_s) = 1;
13180 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13182 TREE_SIDE_EFFECTS (result_c) = 1;
13183 /* Combine the assignments into a compound expr. */
13184 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13185 result_s, result_c));
13193 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13194 two-argument mpfr order N Bessel function FUNC on them and return
13195 the resulting value as a tree with type TYPE. The mpfr precision
13196 is set to the precision of TYPE. We assume that function FUNC
13197 returns zero if the result could be calculated exactly within the
13198 requested precision. */
13200 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13201 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13202 const REAL_VALUE_TYPE *min, bool inclusive)
13204 tree result = NULL_TREE;
13209 /* To proceed, MPFR must exactly represent the target floating point
13210 format, which only happens when the target base equals two. */
13211 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13212 && host_integerp (arg1, 0)
13213 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13215 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13216 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13219 && real_isfinite (ra)
13220 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13222 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13223 const int prec = fmt->p;
13224 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13228 mpfr_init2 (m, prec);
13229 mpfr_from_real (m, ra, GMP_RNDN);
13230 mpfr_clear_flags ();
13231 inexact = func (m, n, m, rnd);
13232 result = do_mpfr_ckconv (m, type, inexact);
13240 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13241 the pointer *(ARG_QUO) and return the result. The type is taken
13242 from the type of ARG0 and is used for setting the precision of the
13243 calculation and results. */
13246 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13248 tree const type = TREE_TYPE (arg0);
13249 tree result = NULL_TREE;
13254 /* To proceed, MPFR must exactly represent the target floating point
13255 format, which only happens when the target base equals two. */
13256 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13257 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13258 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13260 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13261 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13263 if (real_isfinite (ra0) && real_isfinite (ra1))
13265 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13266 const int prec = fmt->p;
13267 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13272 mpfr_inits2 (prec, m0, m1, NULL);
13273 mpfr_from_real (m0, ra0, GMP_RNDN);
13274 mpfr_from_real (m1, ra1, GMP_RNDN);
13275 mpfr_clear_flags ();
13276 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13277 /* Remquo is independent of the rounding mode, so pass
13278 inexact=0 to do_mpfr_ckconv(). */
13279 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13280 mpfr_clears (m0, m1, NULL);
13283 /* MPFR calculates quo in the host's long so it may
13284 return more bits in quo than the target int can hold
13285 if sizeof(host long) > sizeof(target int). This can
13286 happen even for native compilers in LP64 mode. In
13287 these cases, modulo the quo value with the largest
13288 number that the target int can hold while leaving one
13289 bit for the sign. */
13290 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13291 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13293 /* Dereference the quo pointer argument. */
13294 arg_quo = build_fold_indirect_ref (arg_quo);
13295 /* Proceed iff a valid pointer type was passed in. */
13296 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13298 /* Set the value. */
13299 tree result_quo = fold_build2 (MODIFY_EXPR,
13300 TREE_TYPE (arg_quo), arg_quo,
13301 build_int_cst (NULL, integer_quo));
13302 TREE_SIDE_EFFECTS (result_quo) = 1;
13303 /* Combine the quo assignment with the rem. */
13304 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13305 result_quo, result_rem));
13313 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13314 resulting value as a tree with type TYPE. The mpfr precision is
13315 set to the precision of TYPE. We assume that this mpfr function
13316 returns zero if the result could be calculated exactly within the
13317 requested precision. In addition, the integer pointer represented
13318 by ARG_SG will be dereferenced and set to the appropriate signgam
13322 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13324 tree result = NULL_TREE;
13328 /* To proceed, MPFR must exactly represent the target floating point
13329 format, which only happens when the target base equals two. Also
13330 verify ARG is a constant and that ARG_SG is an int pointer. */
13331 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13332 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13333 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13334 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13336 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13338 /* In addition to NaN and Inf, the argument cannot be zero or a
13339 negative integer. */
13340 if (real_isfinite (ra)
13341 && ra->cl != rvc_zero
13342 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13344 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13345 const int prec = fmt->p;
13346 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13351 mpfr_init2 (m, prec);
13352 mpfr_from_real (m, ra, GMP_RNDN);
13353 mpfr_clear_flags ();
13354 inexact = mpfr_lgamma (m, &sg, m, rnd);
13355 result_lg = do_mpfr_ckconv (m, type, inexact);
13361 /* Dereference the arg_sg pointer argument. */
13362 arg_sg = build_fold_indirect_ref (arg_sg);
13363 /* Assign the signgam value into *arg_sg. */
13364 result_sg = fold_build2 (MODIFY_EXPR,
13365 TREE_TYPE (arg_sg), arg_sg,
13366 build_int_cst (NULL, sg));
13367 TREE_SIDE_EFFECTS (result_sg) = 1;
13368 /* Combine the signgam assignment with the lgamma result. */
13369 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13370 result_sg, result_lg));
13378 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13379 function FUNC on it and return the resulting value as a tree with
13380 type TYPE. The mpfr precision is set to the precision of TYPE. We
13381 assume that function FUNC returns zero if the result could be
13382 calculated exactly within the requested precision. */
13385 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13387 tree result = NULL_TREE;
13391 /* To proceed, MPFR must exactly represent the target floating point
13392 format, which only happens when the target base equals two. */
13393 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13394 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13395 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13397 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13398 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13400 if (real_isfinite (re) && real_isfinite (im))
13402 const struct real_format *const fmt =
13403 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13404 const int prec = fmt->p;
13405 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13406 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13410 mpc_init2 (m, prec);
13411 mpfr_from_real (mpc_realref(m), re, rnd);
13412 mpfr_from_real (mpc_imagref(m), im, rnd);
13413 mpfr_clear_flags ();
13414 inexact = func (m, m, crnd);
13415 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13423 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13424 mpc function FUNC on it and return the resulting value as a tree
13425 with type TYPE. The mpfr precision is set to the precision of
13426 TYPE. We assume that function FUNC returns zero if the result
13427 could be calculated exactly within the requested precision. If
13428 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13429 in the arguments and/or results. */
13432 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13433 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13435 tree result = NULL_TREE;
13440 /* To proceed, MPFR must exactly represent the target floating point
13441 format, which only happens when the target base equals two. */
13442 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13443 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13444 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13445 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13446 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13448 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13449 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13450 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13451 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13454 || (real_isfinite (re0) && real_isfinite (im0)
13455 && real_isfinite (re1) && real_isfinite (im1)))
13457 const struct real_format *const fmt =
13458 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13459 const int prec = fmt->p;
13460 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13461 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13465 mpc_init2 (m0, prec);
13466 mpc_init2 (m1, prec);
13467 mpfr_from_real (mpc_realref(m0), re0, rnd);
13468 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13469 mpfr_from_real (mpc_realref(m1), re1, rnd);
13470 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13471 mpfr_clear_flags ();
13472 inexact = func (m0, m0, m1, crnd);
13473 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13483 The functions below provide an alternate interface for folding
13484 builtin function calls presented as GIMPLE_CALL statements rather
13485 than as CALL_EXPRs. The folded result is still expressed as a
13486 tree. There is too much code duplication in the handling of
13487 varargs functions, and a more intrusive re-factoring would permit
13488 better sharing of code between the tree and statement-based
13489 versions of these functions. */
13491 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13492 along with N new arguments specified as the "..." parameters. SKIP
13493 is the number of arguments in STMT to be omitted. This function is used
13494 to do varargs-to-varargs transformations. */
13497 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13499 int oldnargs = gimple_call_num_args (stmt);
13500 int nargs = oldnargs - skip + n;
13501 tree fntype = TREE_TYPE (fndecl);
13502 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13506 location_t loc = gimple_location (stmt);
13508 buffer = XALLOCAVEC (tree, nargs);
13510 for (i = 0; i < n; i++)
13511 buffer[i] = va_arg (ap, tree);
13513 for (j = skip; j < oldnargs; j++, i++)
13514 buffer[i] = gimple_call_arg (stmt, j);
13516 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13519 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13520 a normal call should be emitted rather than expanding the function
13521 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13524 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13526 tree dest, size, len, fn, fmt, flag;
13527 const char *fmt_str;
13528 int nargs = gimple_call_num_args (stmt);
13530 /* Verify the required arguments in the original call. */
13533 dest = gimple_call_arg (stmt, 0);
13534 if (!validate_arg (dest, POINTER_TYPE))
13536 flag = gimple_call_arg (stmt, 1);
13537 if (!validate_arg (flag, INTEGER_TYPE))
13539 size = gimple_call_arg (stmt, 2);
13540 if (!validate_arg (size, INTEGER_TYPE))
13542 fmt = gimple_call_arg (stmt, 3);
13543 if (!validate_arg (fmt, POINTER_TYPE))
13546 if (! host_integerp (size, 1))
13551 if (!init_target_chars ())
13554 /* Check whether the format is a literal string constant. */
13555 fmt_str = c_getstr (fmt);
13556 if (fmt_str != NULL)
13558 /* If the format doesn't contain % args or %%, we know the size. */
13559 if (strchr (fmt_str, target_percent) == 0)
13561 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13562 len = build_int_cstu (size_type_node, strlen (fmt_str));
13564 /* If the format is "%s" and first ... argument is a string literal,
13565 we know the size too. */
13566 else if (fcode == BUILT_IN_SPRINTF_CHK
13567 && strcmp (fmt_str, target_percent_s) == 0)
13573 arg = gimple_call_arg (stmt, 4);
13574 if (validate_arg (arg, POINTER_TYPE))
13576 len = c_strlen (arg, 1);
13577 if (! len || ! host_integerp (len, 1))
13584 if (! integer_all_onesp (size))
13586 if (! len || ! tree_int_cst_lt (len, size))
13590 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13591 or if format doesn't contain % chars or is "%s". */
13592 if (! integer_zerop (flag))
13594 if (fmt_str == NULL)
13596 if (strchr (fmt_str, target_percent) != NULL
13597 && strcmp (fmt_str, target_percent_s))
13601 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13602 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13603 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13607 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13610 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13611 a normal call should be emitted rather than expanding the function
13612 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13613 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13614 passed as second argument. */
13617 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13618 enum built_in_function fcode)
13620 tree dest, size, len, fn, fmt, flag;
13621 const char *fmt_str;
13623 /* Verify the required arguments in the original call. */
13624 if (gimple_call_num_args (stmt) < 5)
13626 dest = gimple_call_arg (stmt, 0);
13627 if (!validate_arg (dest, POINTER_TYPE))
13629 len = gimple_call_arg (stmt, 1);
13630 if (!validate_arg (len, INTEGER_TYPE))
13632 flag = gimple_call_arg (stmt, 2);
13633 if (!validate_arg (flag, INTEGER_TYPE))
13635 size = gimple_call_arg (stmt, 3);
13636 if (!validate_arg (size, INTEGER_TYPE))
13638 fmt = gimple_call_arg (stmt, 4);
13639 if (!validate_arg (fmt, POINTER_TYPE))
13642 if (! host_integerp (size, 1))
13645 if (! integer_all_onesp (size))
13647 if (! host_integerp (len, 1))
13649 /* If LEN is not constant, try MAXLEN too.
13650 For MAXLEN only allow optimizing into non-_ocs function
13651 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13652 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13658 if (tree_int_cst_lt (size, maxlen))
13662 if (!init_target_chars ())
13665 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13666 or if format doesn't contain % chars or is "%s". */
13667 if (! integer_zerop (flag))
13669 fmt_str = c_getstr (fmt);
13670 if (fmt_str == NULL)
13672 if (strchr (fmt_str, target_percent) != NULL
13673 && strcmp (fmt_str, target_percent_s))
13677 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13679 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13680 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13684 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13687 /* Builtins with folding operations that operate on "..." arguments
13688 need special handling; we need to store the arguments in a convenient
13689 data structure before attempting any folding. Fortunately there are
13690 only a few builtins that fall into this category. FNDECL is the
13691 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13692 result of the function call is ignored. */
13695 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13696 bool ignore ATTRIBUTE_UNUSED)
13698 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13699 tree ret = NULL_TREE;
13703 case BUILT_IN_SPRINTF_CHK:
13704 case BUILT_IN_VSPRINTF_CHK:
13705 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13708 case BUILT_IN_SNPRINTF_CHK:
13709 case BUILT_IN_VSNPRINTF_CHK:
13710 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13717 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13718 TREE_NO_WARNING (ret) = 1;
13724 /* A wrapper function for builtin folding that prevents warnings for
13725 "statement without effect" and the like, caused by removing the
13726 call node earlier than the warning is generated. */
13729 fold_call_stmt (gimple stmt, bool ignore)
13731 tree ret = NULL_TREE;
13732 tree fndecl = gimple_call_fndecl (stmt);
13733 location_t loc = gimple_location (stmt);
13735 && TREE_CODE (fndecl) == FUNCTION_DECL
13736 && DECL_BUILT_IN (fndecl)
13737 && !gimple_call_va_arg_pack_p (stmt))
13739 int nargs = gimple_call_num_args (stmt);
13741 if (avoid_folding_inline_builtin (fndecl))
13743 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13745 return targetm.fold_builtin (fndecl, nargs,
13747 ? gimple_call_arg_ptr (stmt, 0)
13748 : &error_mark_node), ignore);
13752 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13754 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13756 for (i = 0; i < nargs; i++)
13757 args[i] = gimple_call_arg (stmt, i);
13758 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13761 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13764 /* Propagate location information from original call to
13765 expansion of builtin. Otherwise things like
13766 maybe_emit_chk_warning, that operate on the expansion
13767 of a builtin, will use the wrong location information. */
13768 if (gimple_has_location (stmt))
13770 tree realret = ret;
13771 if (TREE_CODE (ret) == NOP_EXPR)
13772 realret = TREE_OPERAND (ret, 0);
13773 if (CAN_HAVE_LOCATION_P (realret)
13774 && !EXPR_HAS_LOCATION (realret))
13775 SET_EXPR_LOCATION (realret, loc);
13785 /* Look up the function in built_in_decls that corresponds to DECL
13786 and set ASMSPEC as its user assembler name. DECL must be a
13787 function decl that declares a builtin. */
13790 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13793 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13794 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13797 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13798 set_user_assembler_name (builtin, asmspec);
13799 switch (DECL_FUNCTION_CODE (decl))
13801 case BUILT_IN_MEMCPY:
13802 init_block_move_fn (asmspec);
13803 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13805 case BUILT_IN_MEMSET:
13806 init_block_clear_fn (asmspec);
13807 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13809 case BUILT_IN_MEMMOVE:
13810 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13812 case BUILT_IN_MEMCMP:
13813 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13815 case BUILT_IN_ABORT:
13816 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13819 if (INT_TYPE_SIZE < BITS_PER_WORD)
13821 set_user_assembler_libfunc ("ffs", asmspec);
13822 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13823 MODE_INT, 0), "ffs");
13831 /* Return true if DECL is a builtin that expands to a constant or similarly
13834 is_simple_builtin (tree decl)
13836 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13837 switch (DECL_FUNCTION_CODE (decl))
13839 /* Builtins that expand to constants. */
13840 case BUILT_IN_CONSTANT_P:
13841 case BUILT_IN_EXPECT:
13842 case BUILT_IN_OBJECT_SIZE:
13843 case BUILT_IN_UNREACHABLE:
13844 /* Simple register moves or loads from stack. */
13845 case BUILT_IN_RETURN_ADDRESS:
13846 case BUILT_IN_EXTRACT_RETURN_ADDR:
13847 case BUILT_IN_FROB_RETURN_ADDR:
13848 case BUILT_IN_RETURN:
13849 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13850 case BUILT_IN_FRAME_ADDRESS:
13851 case BUILT_IN_VA_END:
13852 case BUILT_IN_STACK_SAVE:
13853 case BUILT_IN_STACK_RESTORE:
13854 /* Exception state returns or moves registers around. */
13855 case BUILT_IN_EH_FILTER:
13856 case BUILT_IN_EH_POINTER:
13857 case BUILT_IN_EH_COPY_VALUES:
13867 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13868 most probably expanded inline into reasonably simple code. This is a
13869 superset of is_simple_builtin. */
13871 is_inexpensive_builtin (tree decl)
13875 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13877 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13878 switch (DECL_FUNCTION_CODE (decl))
13881 case BUILT_IN_ALLOCA:
13882 case BUILT_IN_BSWAP32:
13883 case BUILT_IN_BSWAP64:
13885 case BUILT_IN_CLZIMAX:
13886 case BUILT_IN_CLZL:
13887 case BUILT_IN_CLZLL:
13889 case BUILT_IN_CTZIMAX:
13890 case BUILT_IN_CTZL:
13891 case BUILT_IN_CTZLL:
13893 case BUILT_IN_FFSIMAX:
13894 case BUILT_IN_FFSL:
13895 case BUILT_IN_FFSLL:
13896 case BUILT_IN_IMAXABS:
13897 case BUILT_IN_FINITE:
13898 case BUILT_IN_FINITEF:
13899 case BUILT_IN_FINITEL:
13900 case BUILT_IN_FINITED32:
13901 case BUILT_IN_FINITED64:
13902 case BUILT_IN_FINITED128:
13903 case BUILT_IN_FPCLASSIFY:
13904 case BUILT_IN_ISFINITE:
13905 case BUILT_IN_ISINF_SIGN:
13906 case BUILT_IN_ISINF:
13907 case BUILT_IN_ISINFF:
13908 case BUILT_IN_ISINFL:
13909 case BUILT_IN_ISINFD32:
13910 case BUILT_IN_ISINFD64:
13911 case BUILT_IN_ISINFD128:
13912 case BUILT_IN_ISNAN:
13913 case BUILT_IN_ISNANF:
13914 case BUILT_IN_ISNANL:
13915 case BUILT_IN_ISNAND32:
13916 case BUILT_IN_ISNAND64:
13917 case BUILT_IN_ISNAND128:
13918 case BUILT_IN_ISNORMAL:
13919 case BUILT_IN_ISGREATER:
13920 case BUILT_IN_ISGREATEREQUAL:
13921 case BUILT_IN_ISLESS:
13922 case BUILT_IN_ISLESSEQUAL:
13923 case BUILT_IN_ISLESSGREATER:
13924 case BUILT_IN_ISUNORDERED:
13925 case BUILT_IN_VA_ARG_PACK:
13926 case BUILT_IN_VA_ARG_PACK_LEN:
13927 case BUILT_IN_VA_COPY:
13928 case BUILT_IN_TRAP:
13929 case BUILT_IN_SAVEREGS:
13930 case BUILT_IN_POPCOUNTL:
13931 case BUILT_IN_POPCOUNTLL:
13932 case BUILT_IN_POPCOUNTIMAX:
13933 case BUILT_IN_POPCOUNT:
13934 case BUILT_IN_PARITYL:
13935 case BUILT_IN_PARITYLL:
13936 case BUILT_IN_PARITYIMAX:
13937 case BUILT_IN_PARITY:
13938 case BUILT_IN_LABS:
13939 case BUILT_IN_LLABS:
13940 case BUILT_IN_PREFETCH:
13944 return is_simple_builtin (decl);