1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic-core.h"
55 #ifndef SLOW_UNALIGNED_ACCESS
56 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
62 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
64 struct target_builtins default_target_builtins;
66 struct target_builtins *this_target_builtins = &default_target_builtins;
69 /* Define the names of the builtin function types and codes. */
70 const char *const built_in_class_names[4]
71 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
73 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
74 const char * built_in_names[(int) END_BUILTINS] =
76 #include "builtins.def"
80 /* Setup an array of _DECL trees, make sure each element is
81 initialized to NULL_TREE. */
82 tree built_in_decls[(int) END_BUILTINS];
83 /* Declarations used when constructing the builtin implicitly in the compiler.
84 It may be NULL_TREE when this is invalid (for instance runtime is not
85 required to implement the function call in all cases). */
86 tree implicit_built_in_decls[(int) END_BUILTINS];
88 static const char *c_getstr (tree);
89 static rtx c_readstr (const char *, enum machine_mode);
90 static int target_char_cast (tree, char *);
91 static rtx get_memory_rtx (tree, tree);
92 static int apply_args_size (void);
93 static int apply_result_size (void);
94 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
95 static rtx result_vector (int, rtx);
97 static void expand_builtin_update_setjmp_buf (rtx);
98 static void expand_builtin_prefetch (tree);
99 static rtx expand_builtin_apply_args (void);
100 static rtx expand_builtin_apply_args_1 (void);
101 static rtx expand_builtin_apply (rtx, rtx, rtx);
102 static void expand_builtin_return (rtx);
103 static enum type_class type_to_class (tree);
104 static rtx expand_builtin_classify_type (tree);
105 static void expand_errno_check (tree, rtx);
106 static rtx expand_builtin_mathfn (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
109 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
110 static rtx expand_builtin_sincos (tree);
111 static rtx expand_builtin_cexpi (tree, rtx, rtx);
112 static rtx expand_builtin_int_roundingfn (tree, rtx);
113 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
114 static rtx expand_builtin_next_arg (void);
115 static rtx expand_builtin_va_start (tree);
116 static rtx expand_builtin_va_end (tree);
117 static rtx expand_builtin_va_copy (tree);
118 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_strcmp (tree, rtx);
120 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
121 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
122 static rtx expand_builtin_memcpy (tree, rtx);
123 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
125 enum machine_mode, int);
126 static rtx expand_builtin_strcpy (tree, rtx);
127 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
128 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_strncpy (tree, rtx);
130 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
131 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
133 static rtx expand_builtin_bzero (tree);
134 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_alloca (tree, rtx);
136 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
137 static rtx expand_builtin_frame_address (tree, tree);
138 static tree stabilize_va_list_loc (location_t, tree, int);
139 static rtx expand_builtin_expect (tree, rtx);
140 static tree fold_builtin_constant_p (tree);
141 static tree fold_builtin_expect (location_t, tree, tree);
142 static tree fold_builtin_classify_type (tree);
143 static tree fold_builtin_strlen (location_t, tree, tree);
144 static tree fold_builtin_inf (location_t, tree, int);
145 static tree fold_builtin_nan (tree, tree, int);
146 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
147 static bool validate_arg (const_tree, enum tree_code code);
148 static bool integer_valued_real_p (tree);
149 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
150 static bool readonly_data_expr (tree);
151 static rtx expand_builtin_fabs (tree, rtx, rtx);
152 static rtx expand_builtin_signbit (tree, rtx);
153 static tree fold_builtin_sqrt (location_t, tree, tree);
154 static tree fold_builtin_cbrt (location_t, tree, tree);
155 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
156 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
157 static tree fold_builtin_cos (location_t, tree, tree, tree);
158 static tree fold_builtin_cosh (location_t, tree, tree, tree);
159 static tree fold_builtin_tan (tree, tree);
160 static tree fold_builtin_trunc (location_t, tree, tree);
161 static tree fold_builtin_floor (location_t, tree, tree);
162 static tree fold_builtin_ceil (location_t, tree, tree);
163 static tree fold_builtin_round (location_t, tree, tree);
164 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
165 static tree fold_builtin_bitop (tree, tree);
166 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
167 static tree fold_builtin_strchr (location_t, tree, tree, tree);
168 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
170 static tree fold_builtin_strcmp (location_t, tree, tree);
171 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
172 static tree fold_builtin_signbit (location_t, tree, tree);
173 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
174 static tree fold_builtin_isascii (location_t, tree);
175 static tree fold_builtin_toascii (location_t, tree);
176 static tree fold_builtin_isdigit (location_t, tree);
177 static tree fold_builtin_fabs (location_t, tree, tree);
178 static tree fold_builtin_abs (location_t, tree, tree);
179 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
181 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
182 static tree fold_builtin_0 (location_t, tree, bool);
183 static tree fold_builtin_1 (location_t, tree, tree, bool);
184 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
185 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
186 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
187 static tree fold_builtin_varargs (location_t, tree, tree, bool);
189 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
190 static tree fold_builtin_strstr (location_t, tree, tree, tree);
191 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
192 static tree fold_builtin_strcat (location_t, tree, tree);
193 static tree fold_builtin_strncat (location_t, tree, tree, tree);
194 static tree fold_builtin_strspn (location_t, tree, tree);
195 static tree fold_builtin_strcspn (location_t, tree, tree);
196 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
198 static rtx expand_builtin_object_size (tree);
199 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
200 enum built_in_function);
201 static void maybe_emit_chk_warning (tree, enum built_in_function);
202 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
203 static void maybe_emit_free_warning (tree);
204 static tree fold_builtin_object_size (tree, tree);
205 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
206 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
207 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
208 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
209 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
210 enum built_in_function);
211 static bool init_target_chars (void);
213 static unsigned HOST_WIDE_INT target_newline;
214 static unsigned HOST_WIDE_INT target_percent;
215 static unsigned HOST_WIDE_INT target_c;
216 static unsigned HOST_WIDE_INT target_s;
217 static char target_percent_c[3];
218 static char target_percent_s[3];
219 static char target_percent_s_newline[4];
220 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
221 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
222 static tree do_mpfr_arg2 (tree, tree, tree,
223 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
224 static tree do_mpfr_arg3 (tree, tree, tree, tree,
225 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
226 static tree do_mpfr_sincos (tree, tree, tree);
227 static tree do_mpfr_bessel_n (tree, tree, tree,
228 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
229 const REAL_VALUE_TYPE *, bool);
230 static tree do_mpfr_remquo (tree, tree, tree);
231 static tree do_mpfr_lgamma_r (tree, tree, tree);
233 /* Return true if NAME starts with __builtin_ or __sync_. */
236 is_builtin_name (const char *name)
238 if (strncmp (name, "__builtin_", 10) == 0)
240 if (strncmp (name, "__sync_", 7) == 0)
246 /* Return true if DECL is a function symbol representing a built-in. */
249 is_builtin_fn (tree decl)
251 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
255 /* Return true if NODE should be considered for inline expansion regardless
256 of the optimization level. This means whenever a function is invoked with
257 its "internal" name, which normally contains the prefix "__builtin". */
260 called_as_built_in (tree node)
262 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
263 we want the name used to call the function, not the name it
265 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
266 return is_builtin_name (name);
269 /* Return the alignment in bits of EXP, an object.
270 Don't return more than MAX_ALIGN no matter what. */
273 get_object_alignment (tree exp, unsigned int max_align)
275 HOST_WIDE_INT bitsize, bitpos;
277 enum machine_mode mode;
278 int unsignedp, volatilep;
279 unsigned int align, inner;
281 /* Get the innermost object and the constant (bitpos) and possibly
282 variable (offset) offset of the access. */
283 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
284 &mode, &unsignedp, &volatilep, true);
286 /* Extract alignment information from the innermost object and
287 possibly adjust bitpos and offset. */
288 if (TREE_CODE (exp) == CONST_DECL)
289 exp = DECL_INITIAL (exp);
291 && TREE_CODE (exp) != LABEL_DECL)
292 align = DECL_ALIGN (exp);
293 else if (CONSTANT_CLASS_P (exp))
295 align = TYPE_ALIGN (TREE_TYPE (exp));
296 #ifdef CONSTANT_ALIGNMENT
297 align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
300 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
301 align = TYPE_ALIGN (TREE_TYPE (exp));
302 else if (TREE_CODE (exp) == INDIRECT_REF)
303 align = TYPE_ALIGN (TREE_TYPE (exp));
304 else if (TREE_CODE (exp) == MISALIGNED_INDIRECT_REF)
306 tree op1 = TREE_OPERAND (exp, 1);
307 align = integer_zerop (op1) ? BITS_PER_UNIT : TREE_INT_CST_LOW (op1);
309 else if (TREE_CODE (exp) == MEM_REF)
311 tree addr = TREE_OPERAND (exp, 0);
312 struct ptr_info_def *pi;
313 if (TREE_CODE (addr) == BIT_AND_EXPR
314 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
316 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
317 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
318 align *= BITS_PER_UNIT;
319 addr = TREE_OPERAND (addr, 0);
322 align = BITS_PER_UNIT;
323 if (TREE_CODE (addr) == SSA_NAME
324 && (pi = SSA_NAME_PTR_INFO (addr)))
326 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
327 align = MAX (pi->align * BITS_PER_UNIT, align);
329 else if (TREE_CODE (addr) == ADDR_EXPR)
330 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
332 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
334 else if (TREE_CODE (exp) == TARGET_MEM_REF
336 && POINTER_TYPE_P (TREE_TYPE (TMR_BASE (exp))))
338 struct ptr_info_def *pi;
339 tree addr = TMR_BASE (exp);
340 if (TREE_CODE (addr) == BIT_AND_EXPR
341 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
343 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
344 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
345 align *= BITS_PER_UNIT;
346 addr = TREE_OPERAND (addr, 0);
349 align = BITS_PER_UNIT;
350 if (TREE_CODE (addr) == SSA_NAME
351 && (pi = SSA_NAME_PTR_INFO (addr)))
353 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
354 align = MAX (pi->align * BITS_PER_UNIT, align);
356 else if (TREE_CODE (addr) == ADDR_EXPR)
357 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
359 if (TMR_OFFSET (exp))
360 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
361 if (TMR_INDEX (exp) && TMR_STEP (exp))
363 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
364 align = MIN (align, (step & -step) * BITS_PER_UNIT);
366 else if (TMR_INDEX (exp))
367 align = BITS_PER_UNIT;
369 else if (TREE_CODE (exp) == TARGET_MEM_REF
372 align = get_object_alignment (TMR_SYMBOL (exp), max_align);
373 if (TMR_OFFSET (exp))
374 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
375 if (TMR_INDEX (exp) && TMR_STEP (exp))
377 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
378 align = MIN (align, (step & -step) * BITS_PER_UNIT);
380 else if (TMR_INDEX (exp))
381 align = BITS_PER_UNIT;
384 align = BITS_PER_UNIT;
386 /* If there is a non-constant offset part extract the maximum
387 alignment that can prevail. */
393 if (TREE_CODE (offset) == PLUS_EXPR)
395 next_offset = TREE_OPERAND (offset, 0);
396 offset = TREE_OPERAND (offset, 1);
400 if (host_integerp (offset, 1))
402 /* Any overflow in calculating offset_bits won't change
405 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
408 inner = MIN (inner, (offset_bits & -offset_bits));
410 else if (TREE_CODE (offset) == MULT_EXPR
411 && host_integerp (TREE_OPERAND (offset, 1), 1))
413 /* Any overflow in calculating offset_factor won't change
415 unsigned offset_factor
416 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
420 inner = MIN (inner, (offset_factor & -offset_factor));
424 inner = MIN (inner, BITS_PER_UNIT);
427 offset = next_offset;
430 /* Alignment is innermost object alignment adjusted by the constant
431 and non-constant offset parts. */
432 align = MIN (align, inner);
433 bitpos = bitpos & (align - 1);
435 /* align and bitpos now specify known low bits of the pointer.
436 ptr & (align - 1) == bitpos. */
439 align = (bitpos & -bitpos);
441 return MIN (align, max_align);
444 /* Returns true iff we can trust that alignment information has been
445 calculated properly. */
448 can_trust_pointer_alignment (void)
450 /* We rely on TER to compute accurate alignment information. */
451 return (optimize && flag_tree_ter);
454 /* Return the alignment in bits of EXP, a pointer valued expression.
455 But don't return more than MAX_ALIGN no matter what.
456 The alignment returned is, by default, the alignment of the thing that
457 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
459 Otherwise, look at the expression to see if we can do better, i.e., if the
460 expression is actually pointing at an object whose alignment is tighter. */
463 get_pointer_alignment (tree exp, unsigned int max_align)
467 if (TREE_CODE (exp) == ADDR_EXPR)
468 return get_object_alignment (TREE_OPERAND (exp, 0), max_align);
469 else if (TREE_CODE (exp) == SSA_NAME
470 && POINTER_TYPE_P (TREE_TYPE (exp)))
472 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
475 return BITS_PER_UNIT;
476 if (pi->misalign != 0)
477 align = (pi->misalign & -pi->misalign);
480 return MIN (max_align, align * BITS_PER_UNIT);
483 return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
486 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
487 way, because it could contain a zero byte in the middle.
488 TREE_STRING_LENGTH is the size of the character array, not the string.
490 ONLY_VALUE should be nonzero if the result is not going to be emitted
491 into the instruction stream and zero if it is going to be expanded.
492 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
493 is returned, otherwise NULL, since
494 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
495 evaluate the side-effects.
497 The value returned is of type `ssizetype'.
499 Unfortunately, string_constant can't access the values of const char
500 arrays with initializers, so neither can we do so here. */
503 c_strlen (tree src, int only_value)
506 HOST_WIDE_INT offset;
512 if (TREE_CODE (src) == COND_EXPR
513 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
517 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
518 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
519 if (tree_int_cst_equal (len1, len2))
523 if (TREE_CODE (src) == COMPOUND_EXPR
524 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
525 return c_strlen (TREE_OPERAND (src, 1), only_value);
527 if (EXPR_HAS_LOCATION (src))
528 loc = EXPR_LOCATION (src);
530 loc = input_location;
532 src = string_constant (src, &offset_node);
536 max = TREE_STRING_LENGTH (src) - 1;
537 ptr = TREE_STRING_POINTER (src);
539 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
541 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
542 compute the offset to the following null if we don't know where to
543 start searching for it. */
546 for (i = 0; i < max; i++)
550 /* We don't know the starting offset, but we do know that the string
551 has no internal zero bytes. We can assume that the offset falls
552 within the bounds of the string; otherwise, the programmer deserves
553 what he gets. Subtract the offset from the length of the string,
554 and return that. This would perhaps not be valid if we were dealing
555 with named arrays in addition to literal string constants. */
557 return size_diffop_loc (loc, size_int (max), offset_node);
560 /* We have a known offset into the string. Start searching there for
561 a null character if we can represent it as a single HOST_WIDE_INT. */
562 if (offset_node == 0)
564 else if (! host_integerp (offset_node, 0))
567 offset = tree_low_cst (offset_node, 0);
569 /* If the offset is known to be out of bounds, warn, and call strlen at
571 if (offset < 0 || offset > max)
573 /* Suppress multiple warnings for propagated constant strings. */
574 if (! TREE_NO_WARNING (src))
576 warning_at (loc, 0, "offset outside bounds of constant string");
577 TREE_NO_WARNING (src) = 1;
582 /* Use strlen to search for the first zero byte. Since any strings
583 constructed with build_string will have nulls appended, we win even
584 if we get handed something like (char[4])"abcd".
586 Since OFFSET is our starting index into the string, no further
587 calculation is needed. */
588 return ssize_int (strlen (ptr + offset));
591 /* Return a char pointer for a C string if it is a string constant
592 or sum of string constant and integer constant. */
599 src = string_constant (src, &offset_node);
603 if (offset_node == 0)
604 return TREE_STRING_POINTER (src);
605 else if (!host_integerp (offset_node, 1)
606 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
609 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
612 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
613 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
616 c_readstr (const char *str, enum machine_mode mode)
622 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
627 for (i = 0; i < GET_MODE_SIZE (mode); i++)
630 if (WORDS_BIG_ENDIAN)
631 j = GET_MODE_SIZE (mode) - i - 1;
632 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
633 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
634 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
636 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
639 ch = (unsigned char) str[i];
640 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
642 return immed_double_const (c[0], c[1], mode);
645 /* Cast a target constant CST to target CHAR and if that value fits into
646 host char type, return zero and put that value into variable pointed to by
650 target_char_cast (tree cst, char *p)
652 unsigned HOST_WIDE_INT val, hostval;
654 if (!host_integerp (cst, 1)
655 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
658 val = tree_low_cst (cst, 1);
659 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
660 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
663 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
664 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
673 /* Similar to save_expr, but assumes that arbitrary code is not executed
674 in between the multiple evaluations. In particular, we assume that a
675 non-addressable local variable will not be modified. */
678 builtin_save_expr (tree exp)
680 if (TREE_ADDRESSABLE (exp) == 0
681 && (TREE_CODE (exp) == PARM_DECL
682 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
685 return save_expr (exp);
688 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
689 times to get the address of either a higher stack frame, or a return
690 address located within it (depending on FNDECL_CODE). */
693 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
697 #ifdef INITIAL_FRAME_ADDRESS_RTX
698 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
702 /* For a zero count with __builtin_return_address, we don't care what
703 frame address we return, because target-specific definitions will
704 override us. Therefore frame pointer elimination is OK, and using
705 the soft frame pointer is OK.
707 For a nonzero count, or a zero count with __builtin_frame_address,
708 we require a stable offset from the current frame pointer to the
709 previous one, so we must use the hard frame pointer, and
710 we must disable frame pointer elimination. */
711 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
712 tem = frame_pointer_rtx;
715 tem = hard_frame_pointer_rtx;
717 /* Tell reload not to eliminate the frame pointer. */
718 crtl->accesses_prior_frames = 1;
722 /* Some machines need special handling before we can access
723 arbitrary frames. For example, on the SPARC, we must first flush
724 all register windows to the stack. */
725 #ifdef SETUP_FRAME_ADDRESSES
727 SETUP_FRAME_ADDRESSES ();
730 /* On the SPARC, the return address is not in the frame, it is in a
731 register. There is no way to access it off of the current frame
732 pointer, but it can be accessed off the previous frame pointer by
733 reading the value from the register window save area. */
734 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
735 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
739 /* Scan back COUNT frames to the specified frame. */
740 for (i = 0; i < count; i++)
742 /* Assume the dynamic chain pointer is in the word that the
743 frame address points to, unless otherwise specified. */
744 #ifdef DYNAMIC_CHAIN_ADDRESS
745 tem = DYNAMIC_CHAIN_ADDRESS (tem);
747 tem = memory_address (Pmode, tem);
748 tem = gen_frame_mem (Pmode, tem);
749 tem = copy_to_reg (tem);
752 /* For __builtin_frame_address, return what we've got. But, on
753 the SPARC for example, we may have to add a bias. */
754 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
755 #ifdef FRAME_ADDR_RTX
756 return FRAME_ADDR_RTX (tem);
761 /* For __builtin_return_address, get the return address from that frame. */
762 #ifdef RETURN_ADDR_RTX
763 tem = RETURN_ADDR_RTX (count, tem);
765 tem = memory_address (Pmode,
766 plus_constant (tem, GET_MODE_SIZE (Pmode)));
767 tem = gen_frame_mem (Pmode, tem);
772 /* Alias set used for setjmp buffer. */
773 static alias_set_type setjmp_alias_set = -1;
775 /* Construct the leading half of a __builtin_setjmp call. Control will
776 return to RECEIVER_LABEL. This is also called directly by the SJLJ
777 exception handling code. */
780 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
782 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
786 if (setjmp_alias_set == -1)
787 setjmp_alias_set = new_alias_set ();
789 buf_addr = convert_memory_address (Pmode, buf_addr);
791 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
793 /* We store the frame pointer and the address of receiver_label in
794 the buffer and use the rest of it for the stack save area, which
795 is machine-dependent. */
797 mem = gen_rtx_MEM (Pmode, buf_addr);
798 set_mem_alias_set (mem, setjmp_alias_set);
799 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
801 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
802 set_mem_alias_set (mem, setjmp_alias_set);
804 emit_move_insn (validize_mem (mem),
805 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
807 stack_save = gen_rtx_MEM (sa_mode,
808 plus_constant (buf_addr,
809 2 * GET_MODE_SIZE (Pmode)));
810 set_mem_alias_set (stack_save, setjmp_alias_set);
811 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
813 /* If there is further processing to do, do it. */
814 #ifdef HAVE_builtin_setjmp_setup
815 if (HAVE_builtin_setjmp_setup)
816 emit_insn (gen_builtin_setjmp_setup (buf_addr));
819 /* Tell optimize_save_area_alloca that extra work is going to
820 need to go on during alloca. */
821 cfun->calls_setjmp = 1;
823 /* We have a nonlocal label. */
824 cfun->has_nonlocal_label = 1;
827 /* Construct the trailing part of a __builtin_setjmp call. This is
828 also called directly by the SJLJ exception handling code. */
831 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
835 /* Clobber the FP when we get here, so we have to make sure it's
836 marked as used by this function. */
837 emit_use (hard_frame_pointer_rtx);
839 /* Mark the static chain as clobbered here so life information
840 doesn't get messed up for it. */
841 chain = targetm.calls.static_chain (current_function_decl, true);
842 if (chain && REG_P (chain))
843 emit_clobber (chain);
845 /* Now put in the code to restore the frame pointer, and argument
846 pointer, if needed. */
847 #ifdef HAVE_nonlocal_goto
848 if (! HAVE_nonlocal_goto)
851 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
852 /* This might change the hard frame pointer in ways that aren't
853 apparent to early optimization passes, so force a clobber. */
854 emit_clobber (hard_frame_pointer_rtx);
857 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
858 if (fixed_regs[ARG_POINTER_REGNUM])
860 #ifdef ELIMINABLE_REGS
862 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
864 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
865 if (elim_regs[i].from == ARG_POINTER_REGNUM
866 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
869 if (i == ARRAY_SIZE (elim_regs))
872 /* Now restore our arg pointer from the address at which it
873 was saved in our stack frame. */
874 emit_move_insn (crtl->args.internal_arg_pointer,
875 copy_to_reg (get_arg_pointer_save_area ()));
880 #ifdef HAVE_builtin_setjmp_receiver
881 if (HAVE_builtin_setjmp_receiver)
882 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
885 #ifdef HAVE_nonlocal_goto_receiver
886 if (HAVE_nonlocal_goto_receiver)
887 emit_insn (gen_nonlocal_goto_receiver ());
892 /* We must not allow the code we just generated to be reordered by
893 scheduling. Specifically, the update of the frame pointer must
894 happen immediately, not later. */
895 emit_insn (gen_blockage ());
898 /* __builtin_longjmp is passed a pointer to an array of five words (not
899 all will be used on all machines). It operates similarly to the C
900 library function of the same name, but is more efficient. Much of
901 the code below is copied from the handling of non-local gotos. */
904 expand_builtin_longjmp (rtx buf_addr, rtx value)
906 rtx fp, lab, stack, insn, last;
907 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
909 /* DRAP is needed for stack realign if longjmp is expanded to current
911 if (SUPPORTS_STACK_ALIGNMENT)
912 crtl->need_drap = true;
914 if (setjmp_alias_set == -1)
915 setjmp_alias_set = new_alias_set ();
917 buf_addr = convert_memory_address (Pmode, buf_addr);
919 buf_addr = force_reg (Pmode, buf_addr);
921 /* We require that the user must pass a second argument of 1, because
922 that is what builtin_setjmp will return. */
923 gcc_assert (value == const1_rtx);
925 last = get_last_insn ();
926 #ifdef HAVE_builtin_longjmp
927 if (HAVE_builtin_longjmp)
928 emit_insn (gen_builtin_longjmp (buf_addr));
932 fp = gen_rtx_MEM (Pmode, buf_addr);
933 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
934 GET_MODE_SIZE (Pmode)));
936 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
937 2 * GET_MODE_SIZE (Pmode)));
938 set_mem_alias_set (fp, setjmp_alias_set);
939 set_mem_alias_set (lab, setjmp_alias_set);
940 set_mem_alias_set (stack, setjmp_alias_set);
942 /* Pick up FP, label, and SP from the block and jump. This code is
943 from expand_goto in stmt.c; see there for detailed comments. */
944 #ifdef HAVE_nonlocal_goto
945 if (HAVE_nonlocal_goto)
946 /* We have to pass a value to the nonlocal_goto pattern that will
947 get copied into the static_chain pointer, but it does not matter
948 what that value is, because builtin_setjmp does not use it. */
949 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
953 lab = copy_to_reg (lab);
955 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
956 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
958 emit_move_insn (hard_frame_pointer_rtx, fp);
959 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
961 emit_use (hard_frame_pointer_rtx);
962 emit_use (stack_pointer_rtx);
963 emit_indirect_jump (lab);
967 /* Search backwards and mark the jump insn as a non-local goto.
968 Note that this precludes the use of __builtin_longjmp to a
969 __builtin_setjmp target in the same function. However, we've
970 already cautioned the user that these functions are for
971 internal exception handling use only. */
972 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
974 gcc_assert (insn != last);
978 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
981 else if (CALL_P (insn))
986 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
987 and the address of the save area. */
990 expand_builtin_nonlocal_goto (tree exp)
992 tree t_label, t_save_area;
993 rtx r_label, r_save_area, r_fp, r_sp, insn;
995 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
998 t_label = CALL_EXPR_ARG (exp, 0);
999 t_save_area = CALL_EXPR_ARG (exp, 1);
1001 r_label = expand_normal (t_label);
1002 r_label = convert_memory_address (Pmode, r_label);
1003 r_save_area = expand_normal (t_save_area);
1004 r_save_area = convert_memory_address (Pmode, r_save_area);
1005 /* Copy the address of the save location to a register just in case it was based
1006 on the frame pointer. */
1007 r_save_area = copy_to_reg (r_save_area);
1008 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1009 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1010 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
1012 crtl->has_nonlocal_goto = 1;
1014 #ifdef HAVE_nonlocal_goto
1015 /* ??? We no longer need to pass the static chain value, afaik. */
1016 if (HAVE_nonlocal_goto)
1017 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1021 r_label = copy_to_reg (r_label);
1023 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1024 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1026 /* Restore frame pointer for containing function.
1027 This sets the actual hard register used for the frame pointer
1028 to the location of the function's incoming static chain info.
1029 The non-local goto handler will then adjust it to contain the
1030 proper value and reload the argument pointer, if needed. */
1031 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1032 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
1034 /* USE of hard_frame_pointer_rtx added for consistency;
1035 not clear if really needed. */
1036 emit_use (hard_frame_pointer_rtx);
1037 emit_use (stack_pointer_rtx);
1039 /* If the architecture is using a GP register, we must
1040 conservatively assume that the target function makes use of it.
1041 The prologue of functions with nonlocal gotos must therefore
1042 initialize the GP register to the appropriate value, and we
1043 must then make sure that this value is live at the point
1044 of the jump. (Note that this doesn't necessarily apply
1045 to targets with a nonlocal_goto pattern; they are free
1046 to implement it in their own way. Note also that this is
1047 a no-op if the GP register is a global invariant.) */
1048 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1049 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1050 emit_use (pic_offset_table_rtx);
1052 emit_indirect_jump (r_label);
1055 /* Search backwards to the jump insn and mark it as a
1057 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1061 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1064 else if (CALL_P (insn))
1071 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1072 (not all will be used on all machines) that was passed to __builtin_setjmp.
1073 It updates the stack pointer in that block to correspond to the current
1077 expand_builtin_update_setjmp_buf (rtx buf_addr)
1079 enum machine_mode sa_mode = Pmode;
1083 #ifdef HAVE_save_stack_nonlocal
1084 if (HAVE_save_stack_nonlocal)
1085 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1087 #ifdef STACK_SAVEAREA_MODE
1088 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1092 = gen_rtx_MEM (sa_mode,
1095 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1099 emit_insn (gen_setjmp ());
1102 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1105 /* Expand a call to __builtin_prefetch. For a target that does not support
1106 data prefetch, evaluate the memory address argument in case it has side
1110 expand_builtin_prefetch (tree exp)
1112 tree arg0, arg1, arg2;
1116 if (!validate_arglist (exp, POINTER_TYPE, 0))
1119 arg0 = CALL_EXPR_ARG (exp, 0);
1121 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1122 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1124 nargs = call_expr_nargs (exp);
1126 arg1 = CALL_EXPR_ARG (exp, 1);
1128 arg1 = integer_zero_node;
1130 arg2 = CALL_EXPR_ARG (exp, 2);
1132 arg2 = integer_three_node;
1134 /* Argument 0 is an address. */
1135 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1137 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1138 if (TREE_CODE (arg1) != INTEGER_CST)
1140 error ("second argument to %<__builtin_prefetch%> must be a constant");
1141 arg1 = integer_zero_node;
1143 op1 = expand_normal (arg1);
1144 /* Argument 1 must be either zero or one. */
1145 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1147 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1152 /* Argument 2 (locality) must be a compile-time constant int. */
1153 if (TREE_CODE (arg2) != INTEGER_CST)
1155 error ("third argument to %<__builtin_prefetch%> must be a constant");
1156 arg2 = integer_zero_node;
1158 op2 = expand_normal (arg2);
1159 /* Argument 2 must be 0, 1, 2, or 3. */
1160 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1162 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1166 #ifdef HAVE_prefetch
1169 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1171 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1172 || (GET_MODE (op0) != Pmode))
1174 op0 = convert_memory_address (Pmode, op0);
1175 op0 = force_reg (Pmode, op0);
1177 emit_insn (gen_prefetch (op0, op1, op2));
1181 /* Don't do anything with direct references to volatile memory, but
1182 generate code to handle other side effects. */
1183 if (!MEM_P (op0) && side_effects_p (op0))
1187 /* Get a MEM rtx for expression EXP which is the address of an operand
1188 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1189 the maximum length of the block of memory that might be accessed or
1193 get_memory_rtx (tree exp, tree len)
1195 tree orig_exp = exp;
1199 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1200 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1201 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1202 exp = TREE_OPERAND (exp, 0);
1204 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1205 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1207 /* Get an expression we can use to find the attributes to assign to MEM.
1208 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1209 we can. First remove any nops. */
1210 while (CONVERT_EXPR_P (exp)
1211 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1212 exp = TREE_OPERAND (exp, 0);
1215 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1216 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1217 && host_integerp (TREE_OPERAND (exp, 1), 0)
1218 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1219 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1220 else if (TREE_CODE (exp) == ADDR_EXPR)
1221 exp = TREE_OPERAND (exp, 0);
1222 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1223 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1227 /* Honor attributes derived from exp, except for the alias set
1228 (as builtin stringops may alias with anything) and the size
1229 (as stringops may access multiple array elements). */
1232 set_mem_attributes (mem, exp, 0);
1235 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1237 /* Allow the string and memory builtins to overflow from one
1238 field into another, see http://gcc.gnu.org/PR23561.
1239 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1240 memory accessed by the string or memory builtin will fit
1241 within the field. */
1242 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1244 tree mem_expr = MEM_EXPR (mem);
1245 HOST_WIDE_INT offset = -1, length = -1;
1248 while (TREE_CODE (inner) == ARRAY_REF
1249 || CONVERT_EXPR_P (inner)
1250 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1251 || TREE_CODE (inner) == SAVE_EXPR)
1252 inner = TREE_OPERAND (inner, 0);
1254 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1256 if (MEM_OFFSET (mem)
1257 && CONST_INT_P (MEM_OFFSET (mem)))
1258 offset = INTVAL (MEM_OFFSET (mem));
1260 if (offset >= 0 && len && host_integerp (len, 0))
1261 length = tree_low_cst (len, 0);
1263 while (TREE_CODE (inner) == COMPONENT_REF)
1265 tree field = TREE_OPERAND (inner, 1);
1266 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1267 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1269 /* Bitfields are generally not byte-addressable. */
1270 gcc_assert (!DECL_BIT_FIELD (field)
1271 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1272 % BITS_PER_UNIT) == 0
1273 && host_integerp (DECL_SIZE (field), 0)
1274 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1275 % BITS_PER_UNIT) == 0));
1277 /* If we can prove that the memory starting at XEXP (mem, 0) and
1278 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1279 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1280 fields without DECL_SIZE_UNIT like flexible array members. */
1282 && DECL_SIZE_UNIT (field)
1283 && host_integerp (DECL_SIZE_UNIT (field), 0))
1286 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1289 && offset + length <= size)
1294 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1295 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1296 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1304 mem_expr = TREE_OPERAND (mem_expr, 0);
1305 inner = TREE_OPERAND (inner, 0);
1308 if (mem_expr == NULL)
1310 if (mem_expr != MEM_EXPR (mem))
1312 set_mem_expr (mem, mem_expr);
1313 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1316 set_mem_alias_set (mem, 0);
1317 set_mem_size (mem, NULL_RTX);
1323 /* Built-in functions to perform an untyped call and return. */
1325 #define apply_args_mode \
1326 (this_target_builtins->x_apply_args_mode)
1327 #define apply_result_mode \
1328 (this_target_builtins->x_apply_result_mode)
1330 /* Return the size required for the block returned by __builtin_apply_args,
1331 and initialize apply_args_mode. */
1334 apply_args_size (void)
1336 static int size = -1;
1339 enum machine_mode mode;
1341 /* The values computed by this function never change. */
1344 /* The first value is the incoming arg-pointer. */
1345 size = GET_MODE_SIZE (Pmode);
1347 /* The second value is the structure value address unless this is
1348 passed as an "invisible" first argument. */
1349 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1350 size += GET_MODE_SIZE (Pmode);
1352 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1353 if (FUNCTION_ARG_REGNO_P (regno))
1355 mode = reg_raw_mode[regno];
1357 gcc_assert (mode != VOIDmode);
1359 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1360 if (size % align != 0)
1361 size = CEIL (size, align) * align;
1362 size += GET_MODE_SIZE (mode);
1363 apply_args_mode[regno] = mode;
1367 apply_args_mode[regno] = VOIDmode;
1373 /* Return the size required for the block returned by __builtin_apply,
1374 and initialize apply_result_mode. */
1377 apply_result_size (void)
1379 static int size = -1;
1381 enum machine_mode mode;
1383 /* The values computed by this function never change. */
1388 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1389 if (targetm.calls.function_value_regno_p (regno))
1391 mode = reg_raw_mode[regno];
1393 gcc_assert (mode != VOIDmode);
1395 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1396 if (size % align != 0)
1397 size = CEIL (size, align) * align;
1398 size += GET_MODE_SIZE (mode);
1399 apply_result_mode[regno] = mode;
1402 apply_result_mode[regno] = VOIDmode;
1404 /* Allow targets that use untyped_call and untyped_return to override
1405 the size so that machine-specific information can be stored here. */
1406 #ifdef APPLY_RESULT_SIZE
1407 size = APPLY_RESULT_SIZE;
1413 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1414 /* Create a vector describing the result block RESULT. If SAVEP is true,
1415 the result block is used to save the values; otherwise it is used to
1416 restore the values. */
1419 result_vector (int savep, rtx result)
1421 int regno, size, align, nelts;
1422 enum machine_mode mode;
1424 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1427 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1428 if ((mode = apply_result_mode[regno]) != VOIDmode)
1430 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1431 if (size % align != 0)
1432 size = CEIL (size, align) * align;
1433 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1434 mem = adjust_address (result, mode, size);
1435 savevec[nelts++] = (savep
1436 ? gen_rtx_SET (VOIDmode, mem, reg)
1437 : gen_rtx_SET (VOIDmode, reg, mem));
1438 size += GET_MODE_SIZE (mode);
1440 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1442 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1444 /* Save the state required to perform an untyped call with the same
1445 arguments as were passed to the current function. */
1448 expand_builtin_apply_args_1 (void)
1451 int size, align, regno;
1452 enum machine_mode mode;
1453 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1455 /* Create a block where the arg-pointer, structure value address,
1456 and argument registers can be saved. */
1457 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1459 /* Walk past the arg-pointer and structure value address. */
1460 size = GET_MODE_SIZE (Pmode);
1461 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1462 size += GET_MODE_SIZE (Pmode);
1464 /* Save each register used in calling a function to the block. */
1465 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1466 if ((mode = apply_args_mode[regno]) != VOIDmode)
1468 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1469 if (size % align != 0)
1470 size = CEIL (size, align) * align;
1472 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1474 emit_move_insn (adjust_address (registers, mode, size), tem);
1475 size += GET_MODE_SIZE (mode);
1478 /* Save the arg pointer to the block. */
1479 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1480 #ifdef STACK_GROWS_DOWNWARD
1481 /* We need the pointer as the caller actually passed them to us, not
1482 as we might have pretended they were passed. Make sure it's a valid
1483 operand, as emit_move_insn isn't expected to handle a PLUS. */
1485 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1488 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1490 size = GET_MODE_SIZE (Pmode);
1492 /* Save the structure value address unless this is passed as an
1493 "invisible" first argument. */
1494 if (struct_incoming_value)
1496 emit_move_insn (adjust_address (registers, Pmode, size),
1497 copy_to_reg (struct_incoming_value));
1498 size += GET_MODE_SIZE (Pmode);
1501 /* Return the address of the block. */
1502 return copy_addr_to_reg (XEXP (registers, 0));
1505 /* __builtin_apply_args returns block of memory allocated on
1506 the stack into which is stored the arg pointer, structure
1507 value address, static chain, and all the registers that might
1508 possibly be used in performing a function call. The code is
1509 moved to the start of the function so the incoming values are
1513 expand_builtin_apply_args (void)
1515 /* Don't do __builtin_apply_args more than once in a function.
1516 Save the result of the first call and reuse it. */
1517 if (apply_args_value != 0)
1518 return apply_args_value;
1520 /* When this function is called, it means that registers must be
1521 saved on entry to this function. So we migrate the
1522 call to the first insn of this function. */
1527 temp = expand_builtin_apply_args_1 ();
1531 apply_args_value = temp;
1533 /* Put the insns after the NOTE that starts the function.
1534 If this is inside a start_sequence, make the outer-level insn
1535 chain current, so the code is placed at the start of the
1536 function. If internal_arg_pointer is a non-virtual pseudo,
1537 it needs to be placed after the function that initializes
1539 push_topmost_sequence ();
1540 if (REG_P (crtl->args.internal_arg_pointer)
1541 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1542 emit_insn_before (seq, parm_birth_insn);
1544 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1545 pop_topmost_sequence ();
1550 /* Perform an untyped call and save the state required to perform an
1551 untyped return of whatever value was returned by the given function. */
1554 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1556 int size, align, regno;
1557 enum machine_mode mode;
1558 rtx incoming_args, result, reg, dest, src, call_insn;
1559 rtx old_stack_level = 0;
1560 rtx call_fusage = 0;
1561 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1563 arguments = convert_memory_address (Pmode, arguments);
1565 /* Create a block where the return registers can be saved. */
1566 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1568 /* Fetch the arg pointer from the ARGUMENTS block. */
1569 incoming_args = gen_reg_rtx (Pmode);
1570 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1571 #ifndef STACK_GROWS_DOWNWARD
1572 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1573 incoming_args, 0, OPTAB_LIB_WIDEN);
1576 /* Push a new argument block and copy the arguments. Do not allow
1577 the (potential) memcpy call below to interfere with our stack
1579 do_pending_stack_adjust ();
1582 /* Save the stack with nonlocal if available. */
1583 #ifdef HAVE_save_stack_nonlocal
1584 if (HAVE_save_stack_nonlocal)
1585 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1588 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1590 /* Allocate a block of memory onto the stack and copy the memory
1591 arguments to the outgoing arguments address. */
1592 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1594 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1595 may have already set current_function_calls_alloca to true.
1596 current_function_calls_alloca won't be set if argsize is zero,
1597 so we have to guarantee need_drap is true here. */
1598 if (SUPPORTS_STACK_ALIGNMENT)
1599 crtl->need_drap = true;
1601 dest = virtual_outgoing_args_rtx;
1602 #ifndef STACK_GROWS_DOWNWARD
1603 if (CONST_INT_P (argsize))
1604 dest = plus_constant (dest, -INTVAL (argsize));
1606 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1608 dest = gen_rtx_MEM (BLKmode, dest);
1609 set_mem_align (dest, PARM_BOUNDARY);
1610 src = gen_rtx_MEM (BLKmode, incoming_args);
1611 set_mem_align (src, PARM_BOUNDARY);
1612 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1614 /* Refer to the argument block. */
1616 arguments = gen_rtx_MEM (BLKmode, arguments);
1617 set_mem_align (arguments, PARM_BOUNDARY);
1619 /* Walk past the arg-pointer and structure value address. */
1620 size = GET_MODE_SIZE (Pmode);
1622 size += GET_MODE_SIZE (Pmode);
1624 /* Restore each of the registers previously saved. Make USE insns
1625 for each of these registers for use in making the call. */
1626 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1627 if ((mode = apply_args_mode[regno]) != VOIDmode)
1629 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1630 if (size % align != 0)
1631 size = CEIL (size, align) * align;
1632 reg = gen_rtx_REG (mode, regno);
1633 emit_move_insn (reg, adjust_address (arguments, mode, size));
1634 use_reg (&call_fusage, reg);
1635 size += GET_MODE_SIZE (mode);
1638 /* Restore the structure value address unless this is passed as an
1639 "invisible" first argument. */
1640 size = GET_MODE_SIZE (Pmode);
1643 rtx value = gen_reg_rtx (Pmode);
1644 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1645 emit_move_insn (struct_value, value);
1646 if (REG_P (struct_value))
1647 use_reg (&call_fusage, struct_value);
1648 size += GET_MODE_SIZE (Pmode);
1651 /* All arguments and registers used for the call are set up by now! */
1652 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1654 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1655 and we don't want to load it into a register as an optimization,
1656 because prepare_call_address already did it if it should be done. */
1657 if (GET_CODE (function) != SYMBOL_REF)
1658 function = memory_address (FUNCTION_MODE, function);
1660 /* Generate the actual call instruction and save the return value. */
1661 #ifdef HAVE_untyped_call
1662 if (HAVE_untyped_call)
1663 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1664 result, result_vector (1, result)));
1667 #ifdef HAVE_call_value
1668 if (HAVE_call_value)
1672 /* Locate the unique return register. It is not possible to
1673 express a call that sets more than one return register using
1674 call_value; use untyped_call for that. In fact, untyped_call
1675 only needs to save the return registers in the given block. */
1676 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1677 if ((mode = apply_result_mode[regno]) != VOIDmode)
1679 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1681 valreg = gen_rtx_REG (mode, regno);
1684 emit_call_insn (GEN_CALL_VALUE (valreg,
1685 gen_rtx_MEM (FUNCTION_MODE, function),
1686 const0_rtx, NULL_RTX, const0_rtx));
1688 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1694 /* Find the CALL insn we just emitted, and attach the register usage
1696 call_insn = last_call_insn ();
1697 add_function_usage_to (call_insn, call_fusage);
1699 /* Restore the stack. */
1700 #ifdef HAVE_save_stack_nonlocal
1701 if (HAVE_save_stack_nonlocal)
1702 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1705 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1709 /* Return the address of the result block. */
1710 result = copy_addr_to_reg (XEXP (result, 0));
1711 return convert_memory_address (ptr_mode, result);
1714 /* Perform an untyped return. */
1717 expand_builtin_return (rtx result)
1719 int size, align, regno;
1720 enum machine_mode mode;
1722 rtx call_fusage = 0;
1724 result = convert_memory_address (Pmode, result);
1726 apply_result_size ();
1727 result = gen_rtx_MEM (BLKmode, result);
1729 #ifdef HAVE_untyped_return
1730 if (HAVE_untyped_return)
1732 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1738 /* Restore the return value and note that each value is used. */
1740 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1741 if ((mode = apply_result_mode[regno]) != VOIDmode)
1743 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1744 if (size % align != 0)
1745 size = CEIL (size, align) * align;
1746 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1747 emit_move_insn (reg, adjust_address (result, mode, size));
1749 push_to_sequence (call_fusage);
1751 call_fusage = get_insns ();
1753 size += GET_MODE_SIZE (mode);
1756 /* Put the USE insns before the return. */
1757 emit_insn (call_fusage);
1759 /* Return whatever values was restored by jumping directly to the end
1761 expand_naked_return ();
1764 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1766 static enum type_class
1767 type_to_class (tree type)
1769 switch (TREE_CODE (type))
1771 case VOID_TYPE: return void_type_class;
1772 case INTEGER_TYPE: return integer_type_class;
1773 case ENUMERAL_TYPE: return enumeral_type_class;
1774 case BOOLEAN_TYPE: return boolean_type_class;
1775 case POINTER_TYPE: return pointer_type_class;
1776 case REFERENCE_TYPE: return reference_type_class;
1777 case OFFSET_TYPE: return offset_type_class;
1778 case REAL_TYPE: return real_type_class;
1779 case COMPLEX_TYPE: return complex_type_class;
1780 case FUNCTION_TYPE: return function_type_class;
1781 case METHOD_TYPE: return method_type_class;
1782 case RECORD_TYPE: return record_type_class;
1784 case QUAL_UNION_TYPE: return union_type_class;
1785 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1786 ? string_type_class : array_type_class);
1787 case LANG_TYPE: return lang_type_class;
1788 default: return no_type_class;
1792 /* Expand a call EXP to __builtin_classify_type. */
1795 expand_builtin_classify_type (tree exp)
1797 if (call_expr_nargs (exp))
1798 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1799 return GEN_INT (no_type_class);
1802 /* This helper macro, meant to be used in mathfn_built_in below,
1803 determines which among a set of three builtin math functions is
1804 appropriate for a given type mode. The `F' and `L' cases are
1805 automatically generated from the `double' case. */
1806 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1807 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1808 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1809 fcodel = BUILT_IN_MATHFN##L ; break;
1810 /* Similar to above, but appends _R after any F/L suffix. */
1811 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1812 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1813 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1814 fcodel = BUILT_IN_MATHFN##L_R ; break;
1816 /* Return mathematic function equivalent to FN but operating directly
1817 on TYPE, if available. If IMPLICIT is true find the function in
1818 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1819 can't do the conversion, return zero. */
1822 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1824 tree const *const fn_arr
1825 = implicit ? implicit_built_in_decls : built_in_decls;
1826 enum built_in_function fcode, fcodef, fcodel;
1830 CASE_MATHFN (BUILT_IN_ACOS)
1831 CASE_MATHFN (BUILT_IN_ACOSH)
1832 CASE_MATHFN (BUILT_IN_ASIN)
1833 CASE_MATHFN (BUILT_IN_ASINH)
1834 CASE_MATHFN (BUILT_IN_ATAN)
1835 CASE_MATHFN (BUILT_IN_ATAN2)
1836 CASE_MATHFN (BUILT_IN_ATANH)
1837 CASE_MATHFN (BUILT_IN_CBRT)
1838 CASE_MATHFN (BUILT_IN_CEIL)
1839 CASE_MATHFN (BUILT_IN_CEXPI)
1840 CASE_MATHFN (BUILT_IN_COPYSIGN)
1841 CASE_MATHFN (BUILT_IN_COS)
1842 CASE_MATHFN (BUILT_IN_COSH)
1843 CASE_MATHFN (BUILT_IN_DREM)
1844 CASE_MATHFN (BUILT_IN_ERF)
1845 CASE_MATHFN (BUILT_IN_ERFC)
1846 CASE_MATHFN (BUILT_IN_EXP)
1847 CASE_MATHFN (BUILT_IN_EXP10)
1848 CASE_MATHFN (BUILT_IN_EXP2)
1849 CASE_MATHFN (BUILT_IN_EXPM1)
1850 CASE_MATHFN (BUILT_IN_FABS)
1851 CASE_MATHFN (BUILT_IN_FDIM)
1852 CASE_MATHFN (BUILT_IN_FLOOR)
1853 CASE_MATHFN (BUILT_IN_FMA)
1854 CASE_MATHFN (BUILT_IN_FMAX)
1855 CASE_MATHFN (BUILT_IN_FMIN)
1856 CASE_MATHFN (BUILT_IN_FMOD)
1857 CASE_MATHFN (BUILT_IN_FREXP)
1858 CASE_MATHFN (BUILT_IN_GAMMA)
1859 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1860 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1861 CASE_MATHFN (BUILT_IN_HYPOT)
1862 CASE_MATHFN (BUILT_IN_ILOGB)
1863 CASE_MATHFN (BUILT_IN_INF)
1864 CASE_MATHFN (BUILT_IN_ISINF)
1865 CASE_MATHFN (BUILT_IN_J0)
1866 CASE_MATHFN (BUILT_IN_J1)
1867 CASE_MATHFN (BUILT_IN_JN)
1868 CASE_MATHFN (BUILT_IN_LCEIL)
1869 CASE_MATHFN (BUILT_IN_LDEXP)
1870 CASE_MATHFN (BUILT_IN_LFLOOR)
1871 CASE_MATHFN (BUILT_IN_LGAMMA)
1872 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1873 CASE_MATHFN (BUILT_IN_LLCEIL)
1874 CASE_MATHFN (BUILT_IN_LLFLOOR)
1875 CASE_MATHFN (BUILT_IN_LLRINT)
1876 CASE_MATHFN (BUILT_IN_LLROUND)
1877 CASE_MATHFN (BUILT_IN_LOG)
1878 CASE_MATHFN (BUILT_IN_LOG10)
1879 CASE_MATHFN (BUILT_IN_LOG1P)
1880 CASE_MATHFN (BUILT_IN_LOG2)
1881 CASE_MATHFN (BUILT_IN_LOGB)
1882 CASE_MATHFN (BUILT_IN_LRINT)
1883 CASE_MATHFN (BUILT_IN_LROUND)
1884 CASE_MATHFN (BUILT_IN_MODF)
1885 CASE_MATHFN (BUILT_IN_NAN)
1886 CASE_MATHFN (BUILT_IN_NANS)
1887 CASE_MATHFN (BUILT_IN_NEARBYINT)
1888 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1889 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1890 CASE_MATHFN (BUILT_IN_POW)
1891 CASE_MATHFN (BUILT_IN_POWI)
1892 CASE_MATHFN (BUILT_IN_POW10)
1893 CASE_MATHFN (BUILT_IN_REMAINDER)
1894 CASE_MATHFN (BUILT_IN_REMQUO)
1895 CASE_MATHFN (BUILT_IN_RINT)
1896 CASE_MATHFN (BUILT_IN_ROUND)
1897 CASE_MATHFN (BUILT_IN_SCALB)
1898 CASE_MATHFN (BUILT_IN_SCALBLN)
1899 CASE_MATHFN (BUILT_IN_SCALBN)
1900 CASE_MATHFN (BUILT_IN_SIGNBIT)
1901 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1902 CASE_MATHFN (BUILT_IN_SIN)
1903 CASE_MATHFN (BUILT_IN_SINCOS)
1904 CASE_MATHFN (BUILT_IN_SINH)
1905 CASE_MATHFN (BUILT_IN_SQRT)
1906 CASE_MATHFN (BUILT_IN_TAN)
1907 CASE_MATHFN (BUILT_IN_TANH)
1908 CASE_MATHFN (BUILT_IN_TGAMMA)
1909 CASE_MATHFN (BUILT_IN_TRUNC)
1910 CASE_MATHFN (BUILT_IN_Y0)
1911 CASE_MATHFN (BUILT_IN_Y1)
1912 CASE_MATHFN (BUILT_IN_YN)
1918 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1919 return fn_arr[fcode];
1920 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1921 return fn_arr[fcodef];
1922 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1923 return fn_arr[fcodel];
1928 /* Like mathfn_built_in_1(), but always use the implicit array. */
1931 mathfn_built_in (tree type, enum built_in_function fn)
1933 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1936 /* If errno must be maintained, expand the RTL to check if the result,
1937 TARGET, of a built-in function call, EXP, is NaN, and if so set
1941 expand_errno_check (tree exp, rtx target)
1943 rtx lab = gen_label_rtx ();
1945 /* Test the result; if it is NaN, set errno=EDOM because
1946 the argument was not in the domain. */
1947 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1948 NULL_RTX, NULL_RTX, lab,
1949 /* The jump is very likely. */
1950 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1953 /* If this built-in doesn't throw an exception, set errno directly. */
1954 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1956 #ifdef GEN_ERRNO_RTX
1957 rtx errno_rtx = GEN_ERRNO_RTX;
1960 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1962 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1968 /* Make sure the library call isn't expanded as a tail call. */
1969 CALL_EXPR_TAILCALL (exp) = 0;
1971 /* We can't set errno=EDOM directly; let the library call do it.
1972 Pop the arguments right away in case the call gets deleted. */
1974 expand_call (exp, target, 0);
1979 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1980 Return NULL_RTX if a normal call should be emitted rather than expanding
1981 the function in-line. EXP is the expression that is a call to the builtin
1982 function; if convenient, the result should be placed in TARGET.
1983 SUBTARGET may be used as the target for computing one of EXP's operands. */
1986 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1988 optab builtin_optab;
1990 tree fndecl = get_callee_fndecl (exp);
1991 enum machine_mode mode;
1992 bool errno_set = false;
1995 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1998 arg = CALL_EXPR_ARG (exp, 0);
2000 switch (DECL_FUNCTION_CODE (fndecl))
2002 CASE_FLT_FN (BUILT_IN_SQRT):
2003 errno_set = ! tree_expr_nonnegative_p (arg);
2004 builtin_optab = sqrt_optab;
2006 CASE_FLT_FN (BUILT_IN_EXP):
2007 errno_set = true; builtin_optab = exp_optab; break;
2008 CASE_FLT_FN (BUILT_IN_EXP10):
2009 CASE_FLT_FN (BUILT_IN_POW10):
2010 errno_set = true; builtin_optab = exp10_optab; break;
2011 CASE_FLT_FN (BUILT_IN_EXP2):
2012 errno_set = true; builtin_optab = exp2_optab; break;
2013 CASE_FLT_FN (BUILT_IN_EXPM1):
2014 errno_set = true; builtin_optab = expm1_optab; break;
2015 CASE_FLT_FN (BUILT_IN_LOGB):
2016 errno_set = true; builtin_optab = logb_optab; break;
2017 CASE_FLT_FN (BUILT_IN_LOG):
2018 errno_set = true; builtin_optab = log_optab; break;
2019 CASE_FLT_FN (BUILT_IN_LOG10):
2020 errno_set = true; builtin_optab = log10_optab; break;
2021 CASE_FLT_FN (BUILT_IN_LOG2):
2022 errno_set = true; builtin_optab = log2_optab; break;
2023 CASE_FLT_FN (BUILT_IN_LOG1P):
2024 errno_set = true; builtin_optab = log1p_optab; break;
2025 CASE_FLT_FN (BUILT_IN_ASIN):
2026 builtin_optab = asin_optab; break;
2027 CASE_FLT_FN (BUILT_IN_ACOS):
2028 builtin_optab = acos_optab; break;
2029 CASE_FLT_FN (BUILT_IN_TAN):
2030 builtin_optab = tan_optab; break;
2031 CASE_FLT_FN (BUILT_IN_ATAN):
2032 builtin_optab = atan_optab; break;
2033 CASE_FLT_FN (BUILT_IN_FLOOR):
2034 builtin_optab = floor_optab; break;
2035 CASE_FLT_FN (BUILT_IN_CEIL):
2036 builtin_optab = ceil_optab; break;
2037 CASE_FLT_FN (BUILT_IN_TRUNC):
2038 builtin_optab = btrunc_optab; break;
2039 CASE_FLT_FN (BUILT_IN_ROUND):
2040 builtin_optab = round_optab; break;
2041 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2042 builtin_optab = nearbyint_optab;
2043 if (flag_trapping_math)
2045 /* Else fallthrough and expand as rint. */
2046 CASE_FLT_FN (BUILT_IN_RINT):
2047 builtin_optab = rint_optab; break;
2048 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2049 builtin_optab = significand_optab; break;
2054 /* Make a suitable register to place result in. */
2055 mode = TYPE_MODE (TREE_TYPE (exp));
2057 if (! flag_errno_math || ! HONOR_NANS (mode))
2060 /* Before working hard, check whether the instruction is available. */
2061 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2063 target = gen_reg_rtx (mode);
2065 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2066 need to expand the argument again. This way, we will not perform
2067 side-effects more the once. */
2068 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2070 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2074 /* Compute into TARGET.
2075 Set TARGET to wherever the result comes back. */
2076 target = expand_unop (mode, builtin_optab, op0, target, 0);
2081 expand_errno_check (exp, target);
2083 /* Output the entire sequence. */
2084 insns = get_insns ();
2090 /* If we were unable to expand via the builtin, stop the sequence
2091 (without outputting the insns) and call to the library function
2092 with the stabilized argument list. */
2096 return expand_call (exp, target, target == const0_rtx);
2099 /* Expand a call to the builtin binary math functions (pow and atan2).
2100 Return NULL_RTX if a normal call should be emitted rather than expanding the
2101 function in-line. EXP is the expression that is a call to the builtin
2102 function; if convenient, the result should be placed in TARGET.
2103 SUBTARGET may be used as the target for computing one of EXP's
2107 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2109 optab builtin_optab;
2110 rtx op0, op1, insns;
2111 int op1_type = REAL_TYPE;
2112 tree fndecl = get_callee_fndecl (exp);
2114 enum machine_mode mode;
2115 bool errno_set = true;
2117 switch (DECL_FUNCTION_CODE (fndecl))
2119 CASE_FLT_FN (BUILT_IN_SCALBN):
2120 CASE_FLT_FN (BUILT_IN_SCALBLN):
2121 CASE_FLT_FN (BUILT_IN_LDEXP):
2122 op1_type = INTEGER_TYPE;
2127 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2130 arg0 = CALL_EXPR_ARG (exp, 0);
2131 arg1 = CALL_EXPR_ARG (exp, 1);
2133 switch (DECL_FUNCTION_CODE (fndecl))
2135 CASE_FLT_FN (BUILT_IN_POW):
2136 builtin_optab = pow_optab; break;
2137 CASE_FLT_FN (BUILT_IN_ATAN2):
2138 builtin_optab = atan2_optab; break;
2139 CASE_FLT_FN (BUILT_IN_SCALB):
2140 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2142 builtin_optab = scalb_optab; break;
2143 CASE_FLT_FN (BUILT_IN_SCALBN):
2144 CASE_FLT_FN (BUILT_IN_SCALBLN):
2145 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2147 /* Fall through... */
2148 CASE_FLT_FN (BUILT_IN_LDEXP):
2149 builtin_optab = ldexp_optab; break;
2150 CASE_FLT_FN (BUILT_IN_FMOD):
2151 builtin_optab = fmod_optab; break;
2152 CASE_FLT_FN (BUILT_IN_REMAINDER):
2153 CASE_FLT_FN (BUILT_IN_DREM):
2154 builtin_optab = remainder_optab; break;
2159 /* Make a suitable register to place result in. */
2160 mode = TYPE_MODE (TREE_TYPE (exp));
2162 /* Before working hard, check whether the instruction is available. */
2163 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2166 target = gen_reg_rtx (mode);
2168 if (! flag_errno_math || ! HONOR_NANS (mode))
2171 /* Always stabilize the argument list. */
2172 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2173 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2175 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2176 op1 = expand_normal (arg1);
2180 /* Compute into TARGET.
2181 Set TARGET to wherever the result comes back. */
2182 target = expand_binop (mode, builtin_optab, op0, op1,
2183 target, 0, OPTAB_DIRECT);
2185 /* If we were unable to expand via the builtin, stop the sequence
2186 (without outputting the insns) and call to the library function
2187 with the stabilized argument list. */
2191 return expand_call (exp, target, target == const0_rtx);
2195 expand_errno_check (exp, target);
2197 /* Output the entire sequence. */
2198 insns = get_insns ();
2205 /* Expand a call to the builtin sin and cos math functions.
2206 Return NULL_RTX if a normal call should be emitted rather than expanding the
2207 function in-line. EXP is the expression that is a call to the builtin
2208 function; if convenient, the result should be placed in TARGET.
2209 SUBTARGET may be used as the target for computing one of EXP's
2213 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2215 optab builtin_optab;
2217 tree fndecl = get_callee_fndecl (exp);
2218 enum machine_mode mode;
2221 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2224 arg = CALL_EXPR_ARG (exp, 0);
2226 switch (DECL_FUNCTION_CODE (fndecl))
2228 CASE_FLT_FN (BUILT_IN_SIN):
2229 CASE_FLT_FN (BUILT_IN_COS):
2230 builtin_optab = sincos_optab; break;
2235 /* Make a suitable register to place result in. */
2236 mode = TYPE_MODE (TREE_TYPE (exp));
2238 /* Check if sincos insn is available, otherwise fallback
2239 to sin or cos insn. */
2240 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2241 switch (DECL_FUNCTION_CODE (fndecl))
2243 CASE_FLT_FN (BUILT_IN_SIN):
2244 builtin_optab = sin_optab; break;
2245 CASE_FLT_FN (BUILT_IN_COS):
2246 builtin_optab = cos_optab; break;
2251 /* Before working hard, check whether the instruction is available. */
2252 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2254 target = gen_reg_rtx (mode);
2256 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2257 need to expand the argument again. This way, we will not perform
2258 side-effects more the once. */
2259 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2261 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2265 /* Compute into TARGET.
2266 Set TARGET to wherever the result comes back. */
2267 if (builtin_optab == sincos_optab)
2271 switch (DECL_FUNCTION_CODE (fndecl))
2273 CASE_FLT_FN (BUILT_IN_SIN):
2274 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2276 CASE_FLT_FN (BUILT_IN_COS):
2277 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2282 gcc_assert (result);
2286 target = expand_unop (mode, builtin_optab, op0, target, 0);
2291 /* Output the entire sequence. */
2292 insns = get_insns ();
2298 /* If we were unable to expand via the builtin, stop the sequence
2299 (without outputting the insns) and call to the library function
2300 with the stabilized argument list. */
2304 target = expand_call (exp, target, target == const0_rtx);
2309 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2310 return an RTL instruction code that implements the functionality.
2311 If that isn't possible or available return CODE_FOR_nothing. */
2313 static enum insn_code
2314 interclass_mathfn_icode (tree arg, tree fndecl)
2316 bool errno_set = false;
2317 optab builtin_optab = 0;
2318 enum machine_mode mode;
2320 switch (DECL_FUNCTION_CODE (fndecl))
2322 CASE_FLT_FN (BUILT_IN_ILOGB):
2323 errno_set = true; builtin_optab = ilogb_optab; break;
2324 CASE_FLT_FN (BUILT_IN_ISINF):
2325 builtin_optab = isinf_optab; break;
2326 case BUILT_IN_ISNORMAL:
2327 case BUILT_IN_ISFINITE:
2328 CASE_FLT_FN (BUILT_IN_FINITE):
2329 case BUILT_IN_FINITED32:
2330 case BUILT_IN_FINITED64:
2331 case BUILT_IN_FINITED128:
2332 case BUILT_IN_ISINFD32:
2333 case BUILT_IN_ISINFD64:
2334 case BUILT_IN_ISINFD128:
2335 /* These builtins have no optabs (yet). */
2341 /* There's no easy way to detect the case we need to set EDOM. */
2342 if (flag_errno_math && errno_set)
2343 return CODE_FOR_nothing;
2345 /* Optab mode depends on the mode of the input argument. */
2346 mode = TYPE_MODE (TREE_TYPE (arg));
2349 return optab_handler (builtin_optab, mode);
2350 return CODE_FOR_nothing;
2353 /* Expand a call to one of the builtin math functions that operate on
2354 floating point argument and output an integer result (ilogb, isinf,
2356 Return 0 if a normal call should be emitted rather than expanding the
2357 function in-line. EXP is the expression that is a call to the builtin
2358 function; if convenient, the result should be placed in TARGET.
2359 SUBTARGET may be used as the target for computing one of EXP's operands. */
2362 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2364 enum insn_code icode = CODE_FOR_nothing;
2366 tree fndecl = get_callee_fndecl (exp);
2367 enum machine_mode mode;
2370 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2373 arg = CALL_EXPR_ARG (exp, 0);
2374 icode = interclass_mathfn_icode (arg, fndecl);
2375 mode = TYPE_MODE (TREE_TYPE (arg));
2377 if (icode != CODE_FOR_nothing)
2379 rtx last = get_last_insn ();
2380 tree orig_arg = arg;
2381 /* Make a suitable register to place result in. */
2383 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
2384 || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
2385 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2387 gcc_assert (insn_data[icode].operand[0].predicate
2388 (target, GET_MODE (target)));
2390 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2391 need to expand the argument again. This way, we will not perform
2392 side-effects more the once. */
2393 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2395 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2397 if (mode != GET_MODE (op0))
2398 op0 = convert_to_mode (mode, op0, 0);
2400 /* Compute into TARGET.
2401 Set TARGET to wherever the result comes back. */
2402 if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
2404 delete_insns_since (last);
2405 CALL_EXPR_ARG (exp, 0) = orig_arg;
2411 /* Expand a call to the builtin sincos math function.
2412 Return NULL_RTX if a normal call should be emitted rather than expanding the
2413 function in-line. EXP is the expression that is a call to the builtin
2417 expand_builtin_sincos (tree exp)
2419 rtx op0, op1, op2, target1, target2;
2420 enum machine_mode mode;
2421 tree arg, sinp, cosp;
2423 location_t loc = EXPR_LOCATION (exp);
2425 if (!validate_arglist (exp, REAL_TYPE,
2426 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2429 arg = CALL_EXPR_ARG (exp, 0);
2430 sinp = CALL_EXPR_ARG (exp, 1);
2431 cosp = CALL_EXPR_ARG (exp, 2);
2433 /* Make a suitable register to place result in. */
2434 mode = TYPE_MODE (TREE_TYPE (arg));
2436 /* Check if sincos insn is available, otherwise emit the call. */
2437 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2440 target1 = gen_reg_rtx (mode);
2441 target2 = gen_reg_rtx (mode);
2443 op0 = expand_normal (arg);
2444 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2445 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2447 /* Compute into target1 and target2.
2448 Set TARGET to wherever the result comes back. */
2449 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2450 gcc_assert (result);
2452 /* Move target1 and target2 to the memory locations indicated
2454 emit_move_insn (op1, target1);
2455 emit_move_insn (op2, target2);
2460 /* Expand a call to the internal cexpi builtin to the sincos math function.
2461 EXP is the expression that is a call to the builtin function; if convenient,
2462 the result should be placed in TARGET. SUBTARGET may be used as the target
2463 for computing one of EXP's operands. */
2466 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2468 tree fndecl = get_callee_fndecl (exp);
2470 enum machine_mode mode;
2472 location_t loc = EXPR_LOCATION (exp);
2474 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2477 arg = CALL_EXPR_ARG (exp, 0);
2478 type = TREE_TYPE (arg);
2479 mode = TYPE_MODE (TREE_TYPE (arg));
2481 /* Try expanding via a sincos optab, fall back to emitting a libcall
2482 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2483 is only generated from sincos, cexp or if we have either of them. */
2484 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2486 op1 = gen_reg_rtx (mode);
2487 op2 = gen_reg_rtx (mode);
2489 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2491 /* Compute into op1 and op2. */
2492 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2494 else if (TARGET_HAS_SINCOS)
2496 tree call, fn = NULL_TREE;
2500 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2501 fn = built_in_decls[BUILT_IN_SINCOSF];
2502 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2503 fn = built_in_decls[BUILT_IN_SINCOS];
2504 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2505 fn = built_in_decls[BUILT_IN_SINCOSL];
2509 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2510 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2511 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2512 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2513 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2514 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2516 /* Make sure not to fold the sincos call again. */
2517 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2518 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2519 call, 3, arg, top1, top2));
2523 tree call, fn = NULL_TREE, narg;
2524 tree ctype = build_complex_type (type);
2526 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2527 fn = built_in_decls[BUILT_IN_CEXPF];
2528 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2529 fn = built_in_decls[BUILT_IN_CEXP];
2530 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2531 fn = built_in_decls[BUILT_IN_CEXPL];
2535 /* If we don't have a decl for cexp create one. This is the
2536 friendliest fallback if the user calls __builtin_cexpi
2537 without full target C99 function support. */
2538 if (fn == NULL_TREE)
2541 const char *name = NULL;
2543 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2545 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2547 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2550 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2551 fn = build_fn_decl (name, fntype);
2554 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2555 build_real (type, dconst0), arg);
2557 /* Make sure not to fold the cexp call again. */
2558 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2559 return expand_expr (build_call_nary (ctype, call, 1, narg),
2560 target, VOIDmode, EXPAND_NORMAL);
2563 /* Now build the proper return type. */
2564 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2565 make_tree (TREE_TYPE (arg), op2),
2566 make_tree (TREE_TYPE (arg), op1)),
2567 target, VOIDmode, EXPAND_NORMAL);
2570 /* Conveniently construct a function call expression. FNDECL names the
2571 function to be called, N is the number of arguments, and the "..."
2572 parameters are the argument expressions. Unlike build_call_exr
2573 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2576 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2579 tree fntype = TREE_TYPE (fndecl);
2580 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2583 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2585 SET_EXPR_LOCATION (fn, loc);
2589 /* Expand a call to one of the builtin rounding functions gcc defines
2590 as an extension (lfloor and lceil). As these are gcc extensions we
2591 do not need to worry about setting errno to EDOM.
2592 If expanding via optab fails, lower expression to (int)(floor(x)).
2593 EXP is the expression that is a call to the builtin function;
2594 if convenient, the result should be placed in TARGET. */
2597 expand_builtin_int_roundingfn (tree exp, rtx target)
2599 convert_optab builtin_optab;
2600 rtx op0, insns, tmp;
2601 tree fndecl = get_callee_fndecl (exp);
2602 enum built_in_function fallback_fn;
2603 tree fallback_fndecl;
2604 enum machine_mode mode;
2607 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2610 arg = CALL_EXPR_ARG (exp, 0);
2612 switch (DECL_FUNCTION_CODE (fndecl))
2614 CASE_FLT_FN (BUILT_IN_LCEIL):
2615 CASE_FLT_FN (BUILT_IN_LLCEIL):
2616 builtin_optab = lceil_optab;
2617 fallback_fn = BUILT_IN_CEIL;
2620 CASE_FLT_FN (BUILT_IN_LFLOOR):
2621 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2622 builtin_optab = lfloor_optab;
2623 fallback_fn = BUILT_IN_FLOOR;
2630 /* Make a suitable register to place result in. */
2631 mode = TYPE_MODE (TREE_TYPE (exp));
2633 target = gen_reg_rtx (mode);
2635 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2636 need to expand the argument again. This way, we will not perform
2637 side-effects more the once. */
2638 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2640 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2644 /* Compute into TARGET. */
2645 if (expand_sfix_optab (target, op0, builtin_optab))
2647 /* Output the entire sequence. */
2648 insns = get_insns ();
2654 /* If we were unable to expand via the builtin, stop the sequence
2655 (without outputting the insns). */
2658 /* Fall back to floating point rounding optab. */
2659 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2661 /* For non-C99 targets we may end up without a fallback fndecl here
2662 if the user called __builtin_lfloor directly. In this case emit
2663 a call to the floor/ceil variants nevertheless. This should result
2664 in the best user experience for not full C99 targets. */
2665 if (fallback_fndecl == NULL_TREE)
2668 const char *name = NULL;
2670 switch (DECL_FUNCTION_CODE (fndecl))
2672 case BUILT_IN_LCEIL:
2673 case BUILT_IN_LLCEIL:
2676 case BUILT_IN_LCEILF:
2677 case BUILT_IN_LLCEILF:
2680 case BUILT_IN_LCEILL:
2681 case BUILT_IN_LLCEILL:
2684 case BUILT_IN_LFLOOR:
2685 case BUILT_IN_LLFLOOR:
2688 case BUILT_IN_LFLOORF:
2689 case BUILT_IN_LLFLOORF:
2692 case BUILT_IN_LFLOORL:
2693 case BUILT_IN_LLFLOORL:
2700 fntype = build_function_type_list (TREE_TYPE (arg),
2701 TREE_TYPE (arg), NULL_TREE);
2702 fallback_fndecl = build_fn_decl (name, fntype);
2705 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2707 tmp = expand_normal (exp);
2709 /* Truncate the result of floating point optab to integer
2710 via expand_fix (). */
2711 target = gen_reg_rtx (mode);
2712 expand_fix (target, tmp, 0);
2717 /* Expand a call to one of the builtin math functions doing integer
2719 Return 0 if a normal call should be emitted rather than expanding the
2720 function in-line. EXP is the expression that is a call to the builtin
2721 function; if convenient, the result should be placed in TARGET. */
2724 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2726 convert_optab builtin_optab;
2728 tree fndecl = get_callee_fndecl (exp);
2730 enum machine_mode mode;
2732 /* There's no easy way to detect the case we need to set EDOM. */
2733 if (flag_errno_math)
2736 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2739 arg = CALL_EXPR_ARG (exp, 0);
2741 switch (DECL_FUNCTION_CODE (fndecl))
2743 CASE_FLT_FN (BUILT_IN_LRINT):
2744 CASE_FLT_FN (BUILT_IN_LLRINT):
2745 builtin_optab = lrint_optab; break;
2746 CASE_FLT_FN (BUILT_IN_LROUND):
2747 CASE_FLT_FN (BUILT_IN_LLROUND):
2748 builtin_optab = lround_optab; break;
2753 /* Make a suitable register to place result in. */
2754 mode = TYPE_MODE (TREE_TYPE (exp));
2756 target = gen_reg_rtx (mode);
2758 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2759 need to expand the argument again. This way, we will not perform
2760 side-effects more the once. */
2761 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2763 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2767 if (expand_sfix_optab (target, op0, builtin_optab))
2769 /* Output the entire sequence. */
2770 insns = get_insns ();
2776 /* If we were unable to expand via the builtin, stop the sequence
2777 (without outputting the insns) and call to the library function
2778 with the stabilized argument list. */
2781 target = expand_call (exp, target, target == const0_rtx);
2786 /* To evaluate powi(x,n), the floating point value x raised to the
2787 constant integer exponent n, we use a hybrid algorithm that
2788 combines the "window method" with look-up tables. For an
2789 introduction to exponentiation algorithms and "addition chains",
2790 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2791 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2792 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2793 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2795 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2796 multiplications to inline before calling the system library's pow
2797 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2798 so this default never requires calling pow, powf or powl. */
2800 #ifndef POWI_MAX_MULTS
2801 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2804 /* The size of the "optimal power tree" lookup table. All
2805 exponents less than this value are simply looked up in the
2806 powi_table below. This threshold is also used to size the
2807 cache of pseudo registers that hold intermediate results. */
2808 #define POWI_TABLE_SIZE 256
2810 /* The size, in bits of the window, used in the "window method"
2811 exponentiation algorithm. This is equivalent to a radix of
2812 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2813 #define POWI_WINDOW_SIZE 3
2815 /* The following table is an efficient representation of an
2816 "optimal power tree". For each value, i, the corresponding
2817 value, j, in the table states than an optimal evaluation
2818 sequence for calculating pow(x,i) can be found by evaluating
2819 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2820 100 integers is given in Knuth's "Seminumerical algorithms". */
2822 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2824 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2825 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2826 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2827 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2828 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2829 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2830 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2831 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2832 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2833 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2834 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2835 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2836 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2837 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2838 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2839 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2840 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2841 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2842 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2843 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2844 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2845 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2846 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2847 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2848 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2849 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2850 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2851 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2852 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2853 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2854 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2855 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2859 /* Return the number of multiplications required to calculate
2860 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2861 subroutine of powi_cost. CACHE is an array indicating
2862 which exponents have already been calculated. */
2865 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2867 /* If we've already calculated this exponent, then this evaluation
2868 doesn't require any additional multiplications. */
2873 return powi_lookup_cost (n - powi_table[n], cache)
2874 + powi_lookup_cost (powi_table[n], cache) + 1;
2877 /* Return the number of multiplications required to calculate
2878 powi(x,n) for an arbitrary x, given the exponent N. This
2879 function needs to be kept in sync with expand_powi below. */
2882 powi_cost (HOST_WIDE_INT n)
2884 bool cache[POWI_TABLE_SIZE];
2885 unsigned HOST_WIDE_INT digit;
2886 unsigned HOST_WIDE_INT val;
2892 /* Ignore the reciprocal when calculating the cost. */
2893 val = (n < 0) ? -n : n;
2895 /* Initialize the exponent cache. */
2896 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2901 while (val >= POWI_TABLE_SIZE)
2905 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2906 result += powi_lookup_cost (digit, cache)
2907 + POWI_WINDOW_SIZE + 1;
2908 val >>= POWI_WINDOW_SIZE;
2917 return result + powi_lookup_cost (val, cache);
2920 /* Recursive subroutine of expand_powi. This function takes the array,
2921 CACHE, of already calculated exponents and an exponent N and returns
2922 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2925 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2927 unsigned HOST_WIDE_INT digit;
2931 if (n < POWI_TABLE_SIZE)
2936 target = gen_reg_rtx (mode);
2939 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2940 op1 = expand_powi_1 (mode, powi_table[n], cache);
2944 target = gen_reg_rtx (mode);
2945 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2946 op0 = expand_powi_1 (mode, n - digit, cache);
2947 op1 = expand_powi_1 (mode, digit, cache);
2951 target = gen_reg_rtx (mode);
2952 op0 = expand_powi_1 (mode, n >> 1, cache);
2956 result = expand_mult (mode, op0, op1, target, 0);
2957 if (result != target)
2958 emit_move_insn (target, result);
2962 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2963 floating point operand in mode MODE, and N is the exponent. This
2964 function needs to be kept in sync with powi_cost above. */
2967 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2969 rtx cache[POWI_TABLE_SIZE];
2973 return CONST1_RTX (mode);
2975 memset (cache, 0, sizeof (cache));
2978 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2980 /* If the original exponent was negative, reciprocate the result. */
2982 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2983 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2988 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
2989 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
2990 if we can simplify it. */
2992 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
2995 if (TREE_CODE (arg1) == REAL_CST
2996 && !TREE_OVERFLOW (arg1)
2997 && flag_unsafe_math_optimizations)
2999 enum machine_mode mode = TYPE_MODE (type);
3000 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
3001 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
3002 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
3003 tree op = NULL_TREE;
3007 /* Optimize pow (x, 0.5) into sqrt. */
3008 if (REAL_VALUES_EQUAL (c, dconsthalf))
3009 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3013 REAL_VALUE_TYPE dconst1_4 = dconst1;
3014 REAL_VALUE_TYPE dconst3_4;
3015 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
3017 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
3018 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
3020 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
3021 machines that a builtin sqrt instruction is smaller than a
3022 call to pow with 0.25, so do this optimization even if
3024 if (REAL_VALUES_EQUAL (c, dconst1_4))
3026 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3027 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
3030 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
3031 are optimizing for space. */
3032 else if (optimize_insn_for_speed_p ()
3033 && !TREE_SIDE_EFFECTS (arg0)
3034 && REAL_VALUES_EQUAL (c, dconst3_4))
3036 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
3037 tree sqrt2 = builtin_save_expr (sqrt1);
3038 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
3039 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
3044 /* Check whether we can do cbrt insstead of pow (x, 1./3.) and
3045 cbrt/sqrts instead of pow (x, 1./6.). */
3047 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
3049 /* First try 1/3. */
3050 REAL_VALUE_TYPE dconst1_3
3051 = real_value_truncate (mode, dconst_third ());
3053 if (REAL_VALUES_EQUAL (c, dconst1_3))
3054 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
3057 else if (optimize_insn_for_speed_p ())
3059 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
3060 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
3062 if (REAL_VALUES_EQUAL (c, dconst1_6))
3064 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3065 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3071 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
3077 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
3078 a normal call should be emitted rather than expanding the function
3079 in-line. EXP is the expression that is a call to the builtin
3080 function; if convenient, the result should be placed in TARGET. */
3083 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
3087 tree type = TREE_TYPE (exp);
3088 REAL_VALUE_TYPE cint, c, c2;
3091 enum machine_mode mode = TYPE_MODE (type);
3093 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
3096 arg0 = CALL_EXPR_ARG (exp, 0);
3097 arg1 = CALL_EXPR_ARG (exp, 1);
3099 if (TREE_CODE (arg1) != REAL_CST
3100 || TREE_OVERFLOW (arg1))
3101 return expand_builtin_mathfn_2 (exp, target, subtarget);
3103 /* Handle constant exponents. */
3105 /* For integer valued exponents we can expand to an optimal multiplication
3106 sequence using expand_powi. */
3107 c = TREE_REAL_CST (arg1);
3108 n = real_to_integer (&c);
3109 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3110 if (real_identical (&c, &cint)
3111 && ((n >= -1 && n <= 2)
3112 || (flag_unsafe_math_optimizations
3113 && optimize_insn_for_speed_p ()
3114 && powi_cost (n) <= POWI_MAX_MULTS)))
3116 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3119 op = force_reg (mode, op);
3120 op = expand_powi (op, mode, n);
3125 narg0 = builtin_save_expr (arg0);
3127 /* If the exponent is not integer valued, check if it is half of an integer.
3128 In this case we can expand to sqrt (x) * x**(n/2). */
3129 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3130 if (fn != NULL_TREE)
3132 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3133 n = real_to_integer (&c2);
3134 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3135 if (real_identical (&c2, &cint)
3136 && ((flag_unsafe_math_optimizations
3137 && optimize_insn_for_speed_p ()
3138 && powi_cost (n/2) <= POWI_MAX_MULTS)
3139 /* Even the c == 0.5 case cannot be done unconditionally
3140 when we need to preserve signed zeros, as
3141 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
3142 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
3143 /* For c == 1.5 we can assume that x * sqrt (x) is always
3144 smaller than pow (x, 1.5) if sqrt will not be expanded
3147 && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)))
3149 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3151 /* Use expand_expr in case the newly built call expression
3152 was folded to a non-call. */
3153 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3156 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3157 op2 = force_reg (mode, op2);
3158 op2 = expand_powi (op2, mode, abs (n / 2));
3159 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3160 0, OPTAB_LIB_WIDEN);
3161 /* If the original exponent was negative, reciprocate the
3164 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3165 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3171 /* Check whether we can do a series of sqrt or cbrt's instead of the pow
3173 op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
3178 /* Try if the exponent is a third of an integer. In this case
3179 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3180 different from pow (x, 1./3.) due to rounding and behavior
3181 with negative x we need to constrain this transformation to
3182 unsafe math and positive x or finite math. */
3183 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3185 && flag_unsafe_math_optimizations
3186 && (tree_expr_nonnegative_p (arg0)
3187 || !HONOR_NANS (mode)))
3189 REAL_VALUE_TYPE dconst3;
3190 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3191 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3192 real_round (&c2, mode, &c2);
3193 n = real_to_integer (&c2);
3194 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3195 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3196 real_convert (&c2, mode, &c2);
3197 if (real_identical (&c2, &c)
3198 && ((optimize_insn_for_speed_p ()
3199 && powi_cost (n/3) <= POWI_MAX_MULTS)
3202 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3204 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3205 if (abs (n) % 3 == 2)
3206 op = expand_simple_binop (mode, MULT, op, op, op,
3207 0, OPTAB_LIB_WIDEN);
3210 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3211 op2 = force_reg (mode, op2);
3212 op2 = expand_powi (op2, mode, abs (n / 3));
3213 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3214 0, OPTAB_LIB_WIDEN);
3215 /* If the original exponent was negative, reciprocate the
3218 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3219 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3225 /* Fall back to optab expansion. */
3226 return expand_builtin_mathfn_2 (exp, target, subtarget);
3229 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3230 a normal call should be emitted rather than expanding the function
3231 in-line. EXP is the expression that is a call to the builtin
3232 function; if convenient, the result should be placed in TARGET. */
3235 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3239 enum machine_mode mode;
3240 enum machine_mode mode2;
3242 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3245 arg0 = CALL_EXPR_ARG (exp, 0);
3246 arg1 = CALL_EXPR_ARG (exp, 1);
3247 mode = TYPE_MODE (TREE_TYPE (exp));
3249 /* Handle constant power. */
3251 if (TREE_CODE (arg1) == INTEGER_CST
3252 && !TREE_OVERFLOW (arg1))
3254 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3256 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3257 Otherwise, check the number of multiplications required. */
3258 if ((TREE_INT_CST_HIGH (arg1) == 0
3259 || TREE_INT_CST_HIGH (arg1) == -1)
3260 && ((n >= -1 && n <= 2)
3261 || (optimize_insn_for_speed_p ()
3262 && powi_cost (n) <= POWI_MAX_MULTS)))
3264 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3265 op0 = force_reg (mode, op0);
3266 return expand_powi (op0, mode, n);
3270 /* Emit a libcall to libgcc. */
3272 /* Mode of the 2nd argument must match that of an int. */
3273 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3275 if (target == NULL_RTX)
3276 target = gen_reg_rtx (mode);
3278 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3279 if (GET_MODE (op0) != mode)
3280 op0 = convert_to_mode (mode, op0, 0);
3281 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3282 if (GET_MODE (op1) != mode2)
3283 op1 = convert_to_mode (mode2, op1, 0);
3285 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3286 target, LCT_CONST, mode, 2,
3287 op0, mode, op1, mode2);
3292 /* Expand expression EXP which is a call to the strlen builtin. Return
3293 NULL_RTX if we failed the caller should emit a normal call, otherwise
3294 try to get the result in TARGET, if convenient. */
3297 expand_builtin_strlen (tree exp, rtx target,
3298 enum machine_mode target_mode)
3300 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3306 tree src = CALL_EXPR_ARG (exp, 0);
3307 rtx result, src_reg, char_rtx, before_strlen;
3308 enum machine_mode insn_mode = target_mode, char_mode;
3309 enum insn_code icode = CODE_FOR_nothing;
3312 /* If the length can be computed at compile-time, return it. */
3313 len = c_strlen (src, 0);
3315 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3317 /* If the length can be computed at compile-time and is constant
3318 integer, but there are side-effects in src, evaluate
3319 src for side-effects, then return len.
3320 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3321 can be optimized into: i++; x = 3; */
3322 len = c_strlen (src, 1);
3323 if (len && TREE_CODE (len) == INTEGER_CST)
3325 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3326 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3329 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3331 /* If SRC is not a pointer type, don't do this operation inline. */
3335 /* Bail out if we can't compute strlen in the right mode. */
3336 while (insn_mode != VOIDmode)
3338 icode = optab_handler (strlen_optab, insn_mode);
3339 if (icode != CODE_FOR_nothing)
3342 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3344 if (insn_mode == VOIDmode)
3347 /* Make a place to write the result of the instruction. */
3351 && GET_MODE (result) == insn_mode
3352 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3353 result = gen_reg_rtx (insn_mode);
3355 /* Make a place to hold the source address. We will not expand
3356 the actual source until we are sure that the expansion will
3357 not fail -- there are trees that cannot be expanded twice. */
3358 src_reg = gen_reg_rtx (Pmode);
3360 /* Mark the beginning of the strlen sequence so we can emit the
3361 source operand later. */
3362 before_strlen = get_last_insn ();
3364 char_rtx = const0_rtx;
3365 char_mode = insn_data[(int) icode].operand[2].mode;
3366 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3368 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3370 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3371 char_rtx, GEN_INT (align));
3376 /* Now that we are assured of success, expand the source. */
3378 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3380 emit_move_insn (src_reg, pat);
3385 emit_insn_after (pat, before_strlen);
3387 emit_insn_before (pat, get_insns ());
3389 /* Return the value in the proper mode for this function. */
3390 if (GET_MODE (result) == target_mode)
3392 else if (target != 0)
3393 convert_move (target, result, 0);
3395 target = convert_to_mode (target_mode, result, 0);
3401 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3402 bytes from constant string DATA + OFFSET and return it as target
3406 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3407 enum machine_mode mode)
3409 const char *str = (const char *) data;
3411 gcc_assert (offset >= 0
3412 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3413 <= strlen (str) + 1));
3415 return c_readstr (str + offset, mode);
3418 /* Expand a call EXP to the memcpy builtin.
3419 Return NULL_RTX if we failed, the caller should emit a normal call,
3420 otherwise try to get the result in TARGET, if convenient (and in
3421 mode MODE if that's convenient). */
3424 expand_builtin_memcpy (tree exp, rtx target)
3426 if (!validate_arglist (exp,
3427 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3431 tree dest = CALL_EXPR_ARG (exp, 0);
3432 tree src = CALL_EXPR_ARG (exp, 1);
3433 tree len = CALL_EXPR_ARG (exp, 2);
3434 const char *src_str;
3435 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3436 unsigned int dest_align
3437 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3438 rtx dest_mem, src_mem, dest_addr, len_rtx;
3439 HOST_WIDE_INT expected_size = -1;
3440 unsigned int expected_align = 0;
3442 /* If DEST is not a pointer type, call the normal function. */
3443 if (dest_align == 0)
3446 /* If either SRC is not a pointer type, don't do this
3447 operation in-line. */
3451 if (currently_expanding_gimple_stmt)
3452 stringop_block_profile (currently_expanding_gimple_stmt,
3453 &expected_align, &expected_size);
3455 if (expected_align < dest_align)
3456 expected_align = dest_align;
3457 dest_mem = get_memory_rtx (dest, len);
3458 set_mem_align (dest_mem, dest_align);
3459 len_rtx = expand_normal (len);
3460 src_str = c_getstr (src);
3462 /* If SRC is a string constant and block move would be done
3463 by pieces, we can avoid loading the string from memory
3464 and only stored the computed constants. */
3466 && CONST_INT_P (len_rtx)
3467 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3468 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3469 CONST_CAST (char *, src_str),
3472 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3473 builtin_memcpy_read_str,
3474 CONST_CAST (char *, src_str),
3475 dest_align, false, 0);
3476 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3477 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3481 src_mem = get_memory_rtx (src, len);
3482 set_mem_align (src_mem, src_align);
3484 /* Copy word part most expediently. */
3485 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3486 CALL_EXPR_TAILCALL (exp)
3487 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3488 expected_align, expected_size);
3492 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3493 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3499 /* Expand a call EXP to the mempcpy builtin.
3500 Return NULL_RTX if we failed; the caller should emit a normal call,
3501 otherwise try to get the result in TARGET, if convenient (and in
3502 mode MODE if that's convenient). If ENDP is 0 return the
3503 destination pointer, if ENDP is 1 return the end pointer ala
3504 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3508 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3510 if (!validate_arglist (exp,
3511 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3515 tree dest = CALL_EXPR_ARG (exp, 0);
3516 tree src = CALL_EXPR_ARG (exp, 1);
3517 tree len = CALL_EXPR_ARG (exp, 2);
3518 return expand_builtin_mempcpy_args (dest, src, len,
3519 target, mode, /*endp=*/ 1);
3523 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3524 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3525 so that this can also be called without constructing an actual CALL_EXPR.
3526 The other arguments and return value are the same as for
3527 expand_builtin_mempcpy. */
3530 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3531 rtx target, enum machine_mode mode, int endp)
3533 /* If return value is ignored, transform mempcpy into memcpy. */
3534 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3536 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3537 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3539 return expand_expr (result, target, mode, EXPAND_NORMAL);
3543 const char *src_str;
3544 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3545 unsigned int dest_align
3546 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3547 rtx dest_mem, src_mem, len_rtx;
3549 /* If either SRC or DEST is not a pointer type, don't do this
3550 operation in-line. */
3551 if (dest_align == 0 || src_align == 0)
3554 /* If LEN is not constant, call the normal function. */
3555 if (! host_integerp (len, 1))
3558 len_rtx = expand_normal (len);
3559 src_str = c_getstr (src);
3561 /* If SRC is a string constant and block move would be done
3562 by pieces, we can avoid loading the string from memory
3563 and only stored the computed constants. */
3565 && CONST_INT_P (len_rtx)
3566 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3567 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3568 CONST_CAST (char *, src_str),
3571 dest_mem = get_memory_rtx (dest, len);
3572 set_mem_align (dest_mem, dest_align);
3573 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3574 builtin_memcpy_read_str,
3575 CONST_CAST (char *, src_str),
3576 dest_align, false, endp);
3577 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3578 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3582 if (CONST_INT_P (len_rtx)
3583 && can_move_by_pieces (INTVAL (len_rtx),
3584 MIN (dest_align, src_align)))
3586 dest_mem = get_memory_rtx (dest, len);
3587 set_mem_align (dest_mem, dest_align);
3588 src_mem = get_memory_rtx (src, len);
3589 set_mem_align (src_mem, src_align);
3590 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3591 MIN (dest_align, src_align), endp);
3592 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3593 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3602 # define HAVE_movstr 0
3603 # define CODE_FOR_movstr CODE_FOR_nothing
3606 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3607 we failed, the caller should emit a normal call, otherwise try to
3608 get the result in TARGET, if convenient. If ENDP is 0 return the
3609 destination pointer, if ENDP is 1 return the end pointer ala
3610 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3614 expand_movstr (tree dest, tree src, rtx target, int endp)
3620 const struct insn_data_d * data;
3625 dest_mem = get_memory_rtx (dest, NULL);
3626 src_mem = get_memory_rtx (src, NULL);
3627 data = insn_data + CODE_FOR_movstr;
3630 target = force_reg (Pmode, XEXP (dest_mem, 0));
3631 dest_mem = replace_equiv_address (dest_mem, target);
3632 end = gen_reg_rtx (Pmode);
3637 || target == const0_rtx
3638 || ! (*data->operand[0].predicate) (target, Pmode))
3640 end = gen_reg_rtx (Pmode);
3641 if (target != const0_rtx)
3648 if (data->operand[0].mode != VOIDmode)
3649 end = gen_lowpart (data->operand[0].mode, end);
3651 insn = data->genfun (end, dest_mem, src_mem);
3657 /* movstr is supposed to set end to the address of the NUL
3658 terminator. If the caller requested a mempcpy-like return value,
3660 if (endp == 1 && target != const0_rtx)
3662 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3663 emit_move_insn (target, force_operand (tem, NULL_RTX));
3669 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3670 NULL_RTX if we failed the caller should emit a normal call, otherwise
3671 try to get the result in TARGET, if convenient (and in mode MODE if that's
3675 expand_builtin_strcpy (tree exp, rtx target)
3677 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3679 tree dest = CALL_EXPR_ARG (exp, 0);
3680 tree src = CALL_EXPR_ARG (exp, 1);
3681 return expand_builtin_strcpy_args (dest, src, target);
3686 /* Helper function to do the actual work for expand_builtin_strcpy. The
3687 arguments to the builtin_strcpy call DEST and SRC are broken out
3688 so that this can also be called without constructing an actual CALL_EXPR.
3689 The other arguments and return value are the same as for
3690 expand_builtin_strcpy. */
3693 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3695 return expand_movstr (dest, src, target, /*endp=*/0);
3698 /* Expand a call EXP to the stpcpy builtin.
3699 Return NULL_RTX if we failed the caller should emit a normal call,
3700 otherwise try to get the result in TARGET, if convenient (and in
3701 mode MODE if that's convenient). */
3704 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3707 location_t loc = EXPR_LOCATION (exp);
3709 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3712 dst = CALL_EXPR_ARG (exp, 0);
3713 src = CALL_EXPR_ARG (exp, 1);
3715 /* If return value is ignored, transform stpcpy into strcpy. */
3716 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3718 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3719 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3720 return expand_expr (result, target, mode, EXPAND_NORMAL);
3727 /* Ensure we get an actual string whose length can be evaluated at
3728 compile-time, not an expression containing a string. This is
3729 because the latter will potentially produce pessimized code
3730 when used to produce the return value. */
3731 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3732 return expand_movstr (dst, src, target, /*endp=*/2);
3734 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3735 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3736 target, mode, /*endp=*/2);
3741 if (TREE_CODE (len) == INTEGER_CST)
3743 rtx len_rtx = expand_normal (len);
3745 if (CONST_INT_P (len_rtx))
3747 ret = expand_builtin_strcpy_args (dst, src, target);
3753 if (mode != VOIDmode)
3754 target = gen_reg_rtx (mode);
3756 target = gen_reg_rtx (GET_MODE (ret));
3758 if (GET_MODE (target) != GET_MODE (ret))
3759 ret = gen_lowpart (GET_MODE (target), ret);
3761 ret = plus_constant (ret, INTVAL (len_rtx));
3762 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3770 return expand_movstr (dst, src, target, /*endp=*/2);
3774 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3775 bytes from constant string DATA + OFFSET and return it as target
3779 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3780 enum machine_mode mode)
3782 const char *str = (const char *) data;
3784 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3787 return c_readstr (str + offset, mode);
3790 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3791 NULL_RTX if we failed the caller should emit a normal call. */
3794 expand_builtin_strncpy (tree exp, rtx target)
3796 location_t loc = EXPR_LOCATION (exp);
3798 if (validate_arglist (exp,
3799 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3801 tree dest = CALL_EXPR_ARG (exp, 0);
3802 tree src = CALL_EXPR_ARG (exp, 1);
3803 tree len = CALL_EXPR_ARG (exp, 2);
3804 tree slen = c_strlen (src, 1);
3806 /* We must be passed a constant len and src parameter. */
3807 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3810 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3812 /* We're required to pad with trailing zeros if the requested
3813 len is greater than strlen(s2)+1. In that case try to
3814 use store_by_pieces, if it fails, punt. */
3815 if (tree_int_cst_lt (slen, len))
3817 unsigned int dest_align
3818 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3819 const char *p = c_getstr (src);
3822 if (!p || dest_align == 0 || !host_integerp (len, 1)
3823 || !can_store_by_pieces (tree_low_cst (len, 1),
3824 builtin_strncpy_read_str,
3825 CONST_CAST (char *, p),
3829 dest_mem = get_memory_rtx (dest, len);
3830 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3831 builtin_strncpy_read_str,
3832 CONST_CAST (char *, p), dest_align, false, 0);
3833 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3834 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3841 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3842 bytes from constant string DATA + OFFSET and return it as target
3846 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3847 enum machine_mode mode)
3849 const char *c = (const char *) data;
3850 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3852 memset (p, *c, GET_MODE_SIZE (mode));
3854 return c_readstr (p, mode);
3857 /* Callback routine for store_by_pieces. Return the RTL of a register
3858 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3859 char value given in the RTL register data. For example, if mode is
3860 4 bytes wide, return the RTL for 0x01010101*data. */
3863 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3864 enum machine_mode mode)
3870 size = GET_MODE_SIZE (mode);
3874 p = XALLOCAVEC (char, size);
3875 memset (p, 1, size);
3876 coeff = c_readstr (p, mode);
3878 target = convert_to_mode (mode, (rtx) data, 1);
3879 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3880 return force_reg (mode, target);
3883 /* Expand expression EXP, which is a call to the memset builtin. Return
3884 NULL_RTX if we failed the caller should emit a normal call, otherwise
3885 try to get the result in TARGET, if convenient (and in mode MODE if that's
3889 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3891 if (!validate_arglist (exp,
3892 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3896 tree dest = CALL_EXPR_ARG (exp, 0);
3897 tree val = CALL_EXPR_ARG (exp, 1);
3898 tree len = CALL_EXPR_ARG (exp, 2);
3899 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3903 /* Helper function to do the actual work for expand_builtin_memset. The
3904 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3905 so that this can also be called without constructing an actual CALL_EXPR.
3906 The other arguments and return value are the same as for
3907 expand_builtin_memset. */
3910 expand_builtin_memset_args (tree dest, tree val, tree len,
3911 rtx target, enum machine_mode mode, tree orig_exp)
3914 enum built_in_function fcode;
3916 unsigned int dest_align;
3917 rtx dest_mem, dest_addr, len_rtx;
3918 HOST_WIDE_INT expected_size = -1;
3919 unsigned int expected_align = 0;
3921 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3923 /* If DEST is not a pointer type, don't do this operation in-line. */
3924 if (dest_align == 0)
3927 if (currently_expanding_gimple_stmt)
3928 stringop_block_profile (currently_expanding_gimple_stmt,
3929 &expected_align, &expected_size);
3931 if (expected_align < dest_align)
3932 expected_align = dest_align;
3934 /* If the LEN parameter is zero, return DEST. */
3935 if (integer_zerop (len))
3937 /* Evaluate and ignore VAL in case it has side-effects. */
3938 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3939 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3942 /* Stabilize the arguments in case we fail. */
3943 dest = builtin_save_expr (dest);
3944 val = builtin_save_expr (val);
3945 len = builtin_save_expr (len);
3947 len_rtx = expand_normal (len);
3948 dest_mem = get_memory_rtx (dest, len);
3950 if (TREE_CODE (val) != INTEGER_CST)
3954 val_rtx = expand_normal (val);
3955 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3958 /* Assume that we can memset by pieces if we can store
3959 * the coefficients by pieces (in the required modes).
3960 * We can't pass builtin_memset_gen_str as that emits RTL. */
3962 if (host_integerp (len, 1)
3963 && can_store_by_pieces (tree_low_cst (len, 1),
3964 builtin_memset_read_str, &c, dest_align,
3967 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3969 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3970 builtin_memset_gen_str, val_rtx, dest_align,
3973 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3974 dest_align, expected_align,
3978 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3979 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3983 if (target_char_cast (val, &c))
3988 if (host_integerp (len, 1)
3989 && can_store_by_pieces (tree_low_cst (len, 1),
3990 builtin_memset_read_str, &c, dest_align,
3992 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3993 builtin_memset_read_str, &c, dest_align, true, 0);
3994 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3995 dest_align, expected_align,
3999 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4000 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4004 set_mem_align (dest_mem, dest_align);
4005 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4006 CALL_EXPR_TAILCALL (orig_exp)
4007 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4008 expected_align, expected_size);
4012 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4013 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4019 fndecl = get_callee_fndecl (orig_exp);
4020 fcode = DECL_FUNCTION_CODE (fndecl);
4021 if (fcode == BUILT_IN_MEMSET)
4022 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4024 else if (fcode == BUILT_IN_BZERO)
4025 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4029 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4030 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4031 return expand_call (fn, target, target == const0_rtx);
4034 /* Expand expression EXP, which is a call to the bzero builtin. Return
4035 NULL_RTX if we failed the caller should emit a normal call. */
4038 expand_builtin_bzero (tree exp)
4041 location_t loc = EXPR_LOCATION (exp);
4043 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4046 dest = CALL_EXPR_ARG (exp, 0);
4047 size = CALL_EXPR_ARG (exp, 1);
4049 /* New argument list transforming bzero(ptr x, int y) to
4050 memset(ptr x, int 0, size_t y). This is done this way
4051 so that if it isn't expanded inline, we fallback to
4052 calling bzero instead of memset. */
4054 return expand_builtin_memset_args (dest, integer_zero_node,
4055 fold_convert_loc (loc, sizetype, size),
4056 const0_rtx, VOIDmode, exp);
4059 /* Expand expression EXP, which is a call to the memcmp built-in function.
4060 Return NULL_RTX if we failed and the
4061 caller should emit a normal call, otherwise try to get the result in
4062 TARGET, if convenient (and in mode MODE, if that's convenient). */
4065 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4066 ATTRIBUTE_UNUSED enum machine_mode mode)
4068 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4070 if (!validate_arglist (exp,
4071 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4074 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4076 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4079 tree arg1 = CALL_EXPR_ARG (exp, 0);
4080 tree arg2 = CALL_EXPR_ARG (exp, 1);
4081 tree len = CALL_EXPR_ARG (exp, 2);
4083 unsigned int arg1_align
4084 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4085 unsigned int arg2_align
4086 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4087 enum machine_mode insn_mode;
4089 #ifdef HAVE_cmpmemsi
4091 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4094 #ifdef HAVE_cmpstrnsi
4096 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4101 /* If we don't have POINTER_TYPE, call the function. */
4102 if (arg1_align == 0 || arg2_align == 0)
4105 /* Make a place to write the result of the instruction. */
4108 && REG_P (result) && GET_MODE (result) == insn_mode
4109 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4110 result = gen_reg_rtx (insn_mode);
4112 arg1_rtx = get_memory_rtx (arg1, len);
4113 arg2_rtx = get_memory_rtx (arg2, len);
4114 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4116 /* Set MEM_SIZE as appropriate. */
4117 if (CONST_INT_P (arg3_rtx))
4119 set_mem_size (arg1_rtx, arg3_rtx);
4120 set_mem_size (arg2_rtx, arg3_rtx);
4123 #ifdef HAVE_cmpmemsi
4125 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4126 GEN_INT (MIN (arg1_align, arg2_align)));
4129 #ifdef HAVE_cmpstrnsi
4131 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4132 GEN_INT (MIN (arg1_align, arg2_align)));
4140 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4141 TYPE_MODE (integer_type_node), 3,
4142 XEXP (arg1_rtx, 0), Pmode,
4143 XEXP (arg2_rtx, 0), Pmode,
4144 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4145 TYPE_UNSIGNED (sizetype)),
4146 TYPE_MODE (sizetype));
4148 /* Return the value in the proper mode for this function. */
4149 mode = TYPE_MODE (TREE_TYPE (exp));
4150 if (GET_MODE (result) == mode)
4152 else if (target != 0)
4154 convert_move (target, result, 0);
4158 return convert_to_mode (mode, result, 0);
4165 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4166 if we failed the caller should emit a normal call, otherwise try to get
4167 the result in TARGET, if convenient. */
4170 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4172 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4175 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4176 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4177 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4179 rtx arg1_rtx, arg2_rtx;
4180 rtx result, insn = NULL_RTX;
4182 tree arg1 = CALL_EXPR_ARG (exp, 0);
4183 tree arg2 = CALL_EXPR_ARG (exp, 1);
4185 unsigned int arg1_align
4186 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4187 unsigned int arg2_align
4188 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4190 /* If we don't have POINTER_TYPE, call the function. */
4191 if (arg1_align == 0 || arg2_align == 0)
4194 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4195 arg1 = builtin_save_expr (arg1);
4196 arg2 = builtin_save_expr (arg2);
4198 arg1_rtx = get_memory_rtx (arg1, NULL);
4199 arg2_rtx = get_memory_rtx (arg2, NULL);
4201 #ifdef HAVE_cmpstrsi
4202 /* Try to call cmpstrsi. */
4205 enum machine_mode insn_mode
4206 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4208 /* Make a place to write the result of the instruction. */
4211 && REG_P (result) && GET_MODE (result) == insn_mode
4212 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4213 result = gen_reg_rtx (insn_mode);
4215 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4216 GEN_INT (MIN (arg1_align, arg2_align)));
4219 #ifdef HAVE_cmpstrnsi
4220 /* Try to determine at least one length and call cmpstrnsi. */
4221 if (!insn && HAVE_cmpstrnsi)
4226 enum machine_mode insn_mode
4227 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4228 tree len1 = c_strlen (arg1, 1);
4229 tree len2 = c_strlen (arg2, 1);
4232 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4234 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4236 /* If we don't have a constant length for the first, use the length
4237 of the second, if we know it. We don't require a constant for
4238 this case; some cost analysis could be done if both are available
4239 but neither is constant. For now, assume they're equally cheap,
4240 unless one has side effects. If both strings have constant lengths,
4247 else if (TREE_SIDE_EFFECTS (len1))
4249 else if (TREE_SIDE_EFFECTS (len2))
4251 else if (TREE_CODE (len1) != INTEGER_CST)
4253 else if (TREE_CODE (len2) != INTEGER_CST)
4255 else if (tree_int_cst_lt (len1, len2))
4260 /* If both arguments have side effects, we cannot optimize. */
4261 if (!len || TREE_SIDE_EFFECTS (len))
4264 arg3_rtx = expand_normal (len);
4266 /* Make a place to write the result of the instruction. */
4269 && REG_P (result) && GET_MODE (result) == insn_mode
4270 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4271 result = gen_reg_rtx (insn_mode);
4273 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4274 GEN_INT (MIN (arg1_align, arg2_align)));
4280 enum machine_mode mode;
4283 /* Return the value in the proper mode for this function. */
4284 mode = TYPE_MODE (TREE_TYPE (exp));
4285 if (GET_MODE (result) == mode)
4288 return convert_to_mode (mode, result, 0);
4289 convert_move (target, result, 0);
4293 /* Expand the library call ourselves using a stabilized argument
4294 list to avoid re-evaluating the function's arguments twice. */
4295 #ifdef HAVE_cmpstrnsi
4298 fndecl = get_callee_fndecl (exp);
4299 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4300 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4301 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4302 return expand_call (fn, target, target == const0_rtx);
4308 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4309 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4310 the result in TARGET, if convenient. */
4313 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4314 ATTRIBUTE_UNUSED enum machine_mode mode)
4316 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4318 if (!validate_arglist (exp,
4319 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4322 /* If c_strlen can determine an expression for one of the string
4323 lengths, and it doesn't have side effects, then emit cmpstrnsi
4324 using length MIN(strlen(string)+1, arg3). */
4325 #ifdef HAVE_cmpstrnsi
4328 tree len, len1, len2;
4329 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4332 tree arg1 = CALL_EXPR_ARG (exp, 0);
4333 tree arg2 = CALL_EXPR_ARG (exp, 1);
4334 tree arg3 = CALL_EXPR_ARG (exp, 2);
4336 unsigned int arg1_align
4337 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4338 unsigned int arg2_align
4339 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4340 enum machine_mode insn_mode
4341 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4343 len1 = c_strlen (arg1, 1);
4344 len2 = c_strlen (arg2, 1);
4347 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4349 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4351 /* If we don't have a constant length for the first, use the length
4352 of the second, if we know it. We don't require a constant for
4353 this case; some cost analysis could be done if both are available
4354 but neither is constant. For now, assume they're equally cheap,
4355 unless one has side effects. If both strings have constant lengths,
4362 else if (TREE_SIDE_EFFECTS (len1))
4364 else if (TREE_SIDE_EFFECTS (len2))
4366 else if (TREE_CODE (len1) != INTEGER_CST)
4368 else if (TREE_CODE (len2) != INTEGER_CST)
4370 else if (tree_int_cst_lt (len1, len2))
4375 /* If both arguments have side effects, we cannot optimize. */
4376 if (!len || TREE_SIDE_EFFECTS (len))
4379 /* The actual new length parameter is MIN(len,arg3). */
4380 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4381 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4383 /* If we don't have POINTER_TYPE, call the function. */
4384 if (arg1_align == 0 || arg2_align == 0)
4387 /* Make a place to write the result of the instruction. */
4390 && REG_P (result) && GET_MODE (result) == insn_mode
4391 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4392 result = gen_reg_rtx (insn_mode);
4394 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4395 arg1 = builtin_save_expr (arg1);
4396 arg2 = builtin_save_expr (arg2);
4397 len = builtin_save_expr (len);
4399 arg1_rtx = get_memory_rtx (arg1, len);
4400 arg2_rtx = get_memory_rtx (arg2, len);
4401 arg3_rtx = expand_normal (len);
4402 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4403 GEN_INT (MIN (arg1_align, arg2_align)));
4408 /* Return the value in the proper mode for this function. */
4409 mode = TYPE_MODE (TREE_TYPE (exp));
4410 if (GET_MODE (result) == mode)
4413 return convert_to_mode (mode, result, 0);
4414 convert_move (target, result, 0);
4418 /* Expand the library call ourselves using a stabilized argument
4419 list to avoid re-evaluating the function's arguments twice. */
4420 fndecl = get_callee_fndecl (exp);
4421 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4423 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4424 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4425 return expand_call (fn, target, target == const0_rtx);
4431 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4432 if that's convenient. */
4435 expand_builtin_saveregs (void)
4439 /* Don't do __builtin_saveregs more than once in a function.
4440 Save the result of the first call and reuse it. */
4441 if (saveregs_value != 0)
4442 return saveregs_value;
4444 /* When this function is called, it means that registers must be
4445 saved on entry to this function. So we migrate the call to the
4446 first insn of this function. */
4450 /* Do whatever the machine needs done in this case. */
4451 val = targetm.calls.expand_builtin_saveregs ();
4456 saveregs_value = val;
4458 /* Put the insns after the NOTE that starts the function. If this
4459 is inside a start_sequence, make the outer-level insn chain current, so
4460 the code is placed at the start of the function. */
4461 push_topmost_sequence ();
4462 emit_insn_after (seq, entry_of_function ());
4463 pop_topmost_sequence ();
4468 /* Expand a call to __builtin_next_arg. */
4471 expand_builtin_next_arg (void)
4473 /* Checking arguments is already done in fold_builtin_next_arg
4474 that must be called before this function. */
4475 return expand_binop (ptr_mode, add_optab,
4476 crtl->args.internal_arg_pointer,
4477 crtl->args.arg_offset_rtx,
4478 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4481 /* Make it easier for the backends by protecting the valist argument
4482 from multiple evaluations. */
4485 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4487 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4489 /* The current way of determining the type of valist is completely
4490 bogus. We should have the information on the va builtin instead. */
4492 vatype = targetm.fn_abi_va_list (cfun->decl);
4494 if (TREE_CODE (vatype) == ARRAY_TYPE)
4496 if (TREE_SIDE_EFFECTS (valist))
4497 valist = save_expr (valist);
4499 /* For this case, the backends will be expecting a pointer to
4500 vatype, but it's possible we've actually been given an array
4501 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4503 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4505 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4506 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4511 tree pt = build_pointer_type (vatype);
4515 if (! TREE_SIDE_EFFECTS (valist))
4518 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4519 TREE_SIDE_EFFECTS (valist) = 1;
4522 if (TREE_SIDE_EFFECTS (valist))
4523 valist = save_expr (valist);
4524 valist = fold_build2_loc (loc, MEM_REF,
4525 vatype, valist, build_int_cst (pt, 0));
4531 /* The "standard" definition of va_list is void*. */
4534 std_build_builtin_va_list (void)
4536 return ptr_type_node;
4539 /* The "standard" abi va_list is va_list_type_node. */
4542 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4544 return va_list_type_node;
4547 /* The "standard" type of va_list is va_list_type_node. */
4550 std_canonical_va_list_type (tree type)
4554 if (INDIRECT_REF_P (type))
4555 type = TREE_TYPE (type);
4556 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4557 type = TREE_TYPE (type);
4558 wtype = va_list_type_node;
4560 /* Treat structure va_list types. */
4561 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4562 htype = TREE_TYPE (htype);
4563 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4565 /* If va_list is an array type, the argument may have decayed
4566 to a pointer type, e.g. by being passed to another function.
4567 In that case, unwrap both types so that we can compare the
4568 underlying records. */
4569 if (TREE_CODE (htype) == ARRAY_TYPE
4570 || POINTER_TYPE_P (htype))
4572 wtype = TREE_TYPE (wtype);
4573 htype = TREE_TYPE (htype);
4576 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4577 return va_list_type_node;
4582 /* The "standard" implementation of va_start: just assign `nextarg' to
4586 std_expand_builtin_va_start (tree valist, rtx nextarg)
4588 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4589 convert_move (va_r, nextarg, 0);
4592 /* Expand EXP, a call to __builtin_va_start. */
4595 expand_builtin_va_start (tree exp)
4599 location_t loc = EXPR_LOCATION (exp);
4601 if (call_expr_nargs (exp) < 2)
4603 error_at (loc, "too few arguments to function %<va_start%>");
4607 if (fold_builtin_next_arg (exp, true))
4610 nextarg = expand_builtin_next_arg ();
4611 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4613 if (targetm.expand_builtin_va_start)
4614 targetm.expand_builtin_va_start (valist, nextarg);
4616 std_expand_builtin_va_start (valist, nextarg);
4621 /* The "standard" implementation of va_arg: read the value from the
4622 current (padded) address and increment by the (padded) size. */
4625 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4628 tree addr, t, type_size, rounded_size, valist_tmp;
4629 unsigned HOST_WIDE_INT align, boundary;
4632 #ifdef ARGS_GROW_DOWNWARD
4633 /* All of the alignment and movement below is for args-grow-up machines.
4634 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4635 implement their own specialized gimplify_va_arg_expr routines. */
4639 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4641 type = build_pointer_type (type);
4643 align = PARM_BOUNDARY / BITS_PER_UNIT;
4644 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4646 /* When we align parameter on stack for caller, if the parameter
4647 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4648 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4649 here with caller. */
4650 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4651 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4653 boundary /= BITS_PER_UNIT;
4655 /* Hoist the valist value into a temporary for the moment. */
4656 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4658 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4659 requires greater alignment, we must perform dynamic alignment. */
4660 if (boundary > align
4661 && !integer_zerop (TYPE_SIZE (type)))
4663 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4664 fold_build2 (POINTER_PLUS_EXPR,
4666 valist_tmp, size_int (boundary - 1)));
4667 gimplify_and_add (t, pre_p);
4669 t = fold_convert (sizetype, valist_tmp);
4670 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4671 fold_convert (TREE_TYPE (valist),
4672 fold_build2 (BIT_AND_EXPR, sizetype, t,
4673 size_int (-boundary))));
4674 gimplify_and_add (t, pre_p);
4679 /* If the actual alignment is less than the alignment of the type,
4680 adjust the type accordingly so that we don't assume strict alignment
4681 when dereferencing the pointer. */
4682 boundary *= BITS_PER_UNIT;
4683 if (boundary < TYPE_ALIGN (type))
4685 type = build_variant_type_copy (type);
4686 TYPE_ALIGN (type) = boundary;
4689 /* Compute the rounded size of the type. */
4690 type_size = size_in_bytes (type);
4691 rounded_size = round_up (type_size, align);
4693 /* Reduce rounded_size so it's sharable with the postqueue. */
4694 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4698 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4700 /* Small args are padded downward. */
4701 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4702 rounded_size, size_int (align));
4703 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4704 size_binop (MINUS_EXPR, rounded_size, type_size));
4705 addr = fold_build2 (POINTER_PLUS_EXPR,
4706 TREE_TYPE (addr), addr, t);
4709 /* Compute new value for AP. */
4710 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4711 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4712 gimplify_and_add (t, pre_p);
4714 addr = fold_convert (build_pointer_type (type), addr);
4717 addr = build_va_arg_indirect_ref (addr);
4719 return build_va_arg_indirect_ref (addr);
4722 /* Build an indirect-ref expression over the given TREE, which represents a
4723 piece of a va_arg() expansion. */
4725 build_va_arg_indirect_ref (tree addr)
4727 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4729 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4735 /* Return a dummy expression of type TYPE in order to keep going after an
4739 dummy_object (tree type)
4741 tree t = build_int_cst (build_pointer_type (type), 0);
4742 return build1 (INDIRECT_REF, type, t);
4745 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4746 builtin function, but a very special sort of operator. */
4748 enum gimplify_status
4749 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4751 tree promoted_type, have_va_type;
4752 tree valist = TREE_OPERAND (*expr_p, 0);
4753 tree type = TREE_TYPE (*expr_p);
4755 location_t loc = EXPR_LOCATION (*expr_p);
4757 /* Verify that valist is of the proper type. */
4758 have_va_type = TREE_TYPE (valist);
4759 if (have_va_type == error_mark_node)
4761 have_va_type = targetm.canonical_va_list_type (have_va_type);
4763 if (have_va_type == NULL_TREE)
4765 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4769 /* Generate a diagnostic for requesting data of a type that cannot
4770 be passed through `...' due to type promotion at the call site. */
4771 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4774 static bool gave_help;
4777 /* Unfortunately, this is merely undefined, rather than a constraint
4778 violation, so we cannot make this an error. If this call is never
4779 executed, the program is still strictly conforming. */
4780 warned = warning_at (loc, 0,
4781 "%qT is promoted to %qT when passed through %<...%>",
4782 type, promoted_type);
4783 if (!gave_help && warned)
4786 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4787 promoted_type, type);
4790 /* We can, however, treat "undefined" any way we please.
4791 Call abort to encourage the user to fix the program. */
4793 inform (loc, "if this code is reached, the program will abort");
4794 /* Before the abort, allow the evaluation of the va_list
4795 expression to exit or longjmp. */
4796 gimplify_and_add (valist, pre_p);
4797 t = build_call_expr_loc (loc,
4798 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4799 gimplify_and_add (t, pre_p);
4801 /* This is dead code, but go ahead and finish so that the
4802 mode of the result comes out right. */
4803 *expr_p = dummy_object (type);
4808 /* Make it easier for the backends by protecting the valist argument
4809 from multiple evaluations. */
4810 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4812 /* For this case, the backends will be expecting a pointer to
4813 TREE_TYPE (abi), but it's possible we've
4814 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4816 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4818 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4819 valist = fold_convert_loc (loc, p1,
4820 build_fold_addr_expr_loc (loc, valist));
4823 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4826 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4828 if (!targetm.gimplify_va_arg_expr)
4829 /* FIXME: Once most targets are converted we should merely
4830 assert this is non-null. */
4833 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4838 /* Expand EXP, a call to __builtin_va_end. */
4841 expand_builtin_va_end (tree exp)
4843 tree valist = CALL_EXPR_ARG (exp, 0);
4845 /* Evaluate for side effects, if needed. I hate macros that don't
4847 if (TREE_SIDE_EFFECTS (valist))
4848 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4853 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4854 builtin rather than just as an assignment in stdarg.h because of the
4855 nastiness of array-type va_list types. */
4858 expand_builtin_va_copy (tree exp)
4861 location_t loc = EXPR_LOCATION (exp);
4863 dst = CALL_EXPR_ARG (exp, 0);
4864 src = CALL_EXPR_ARG (exp, 1);
4866 dst = stabilize_va_list_loc (loc, dst, 1);
4867 src = stabilize_va_list_loc (loc, src, 0);
4869 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4871 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4873 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4874 TREE_SIDE_EFFECTS (t) = 1;
4875 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4879 rtx dstb, srcb, size;
4881 /* Evaluate to pointers. */
4882 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4883 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4884 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4885 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4887 dstb = convert_memory_address (Pmode, dstb);
4888 srcb = convert_memory_address (Pmode, srcb);
4890 /* "Dereference" to BLKmode memories. */
4891 dstb = gen_rtx_MEM (BLKmode, dstb);
4892 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4893 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4894 srcb = gen_rtx_MEM (BLKmode, srcb);
4895 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4896 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4899 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4905 /* Expand a call to one of the builtin functions __builtin_frame_address or
4906 __builtin_return_address. */
4909 expand_builtin_frame_address (tree fndecl, tree exp)
4911 /* The argument must be a nonnegative integer constant.
4912 It counts the number of frames to scan up the stack.
4913 The value is the return address saved in that frame. */
4914 if (call_expr_nargs (exp) == 0)
4915 /* Warning about missing arg was already issued. */
4917 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4919 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4920 error ("invalid argument to %<__builtin_frame_address%>");
4922 error ("invalid argument to %<__builtin_return_address%>");
4928 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4929 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4931 /* Some ports cannot access arbitrary stack frames. */
4934 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4935 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4937 warning (0, "unsupported argument to %<__builtin_return_address%>");
4941 /* For __builtin_frame_address, return what we've got. */
4942 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4946 && ! CONSTANT_P (tem))
4947 tem = copy_to_mode_reg (Pmode, tem);
4952 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4953 we failed and the caller should emit a normal call, otherwise try to get
4954 the result in TARGET, if convenient. */
4957 expand_builtin_alloca (tree exp, rtx target)
4962 /* Emit normal call if marked not-inlineable. */
4963 if (CALL_CANNOT_INLINE_P (exp))
4966 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4969 /* Compute the argument. */
4970 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4972 /* Allocate the desired space. */
4973 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4974 result = convert_memory_address (ptr_mode, result);
4979 /* Expand a call to a bswap builtin with argument ARG0. MODE
4980 is the mode to expand with. */
4983 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4985 enum machine_mode mode;
4989 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4992 arg = CALL_EXPR_ARG (exp, 0);
4993 mode = TYPE_MODE (TREE_TYPE (arg));
4994 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4996 target = expand_unop (mode, bswap_optab, op0, target, 1);
4998 gcc_assert (target);
5000 return convert_to_mode (mode, target, 0);
5003 /* Expand a call to a unary builtin in EXP.
5004 Return NULL_RTX if a normal call should be emitted rather than expanding the
5005 function in-line. If convenient, the result should be placed in TARGET.
5006 SUBTARGET may be used as the target for computing one of EXP's operands. */
5009 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5010 rtx subtarget, optab op_optab)
5014 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5017 /* Compute the argument. */
5018 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5019 VOIDmode, EXPAND_NORMAL);
5020 /* Compute op, into TARGET if possible.
5021 Set TARGET to wherever the result comes back. */
5022 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5023 op_optab, op0, target, 1);
5024 gcc_assert (target);
5026 return convert_to_mode (target_mode, target, 0);
5029 /* Expand a call to __builtin_expect. We just return our argument
5030 as the builtin_expect semantic should've been already executed by
5031 tree branch prediction pass. */
5034 expand_builtin_expect (tree exp, rtx target)
5038 if (call_expr_nargs (exp) < 2)
5040 arg = CALL_EXPR_ARG (exp, 0);
5042 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5043 /* When guessing was done, the hints should be already stripped away. */
5044 gcc_assert (!flag_guess_branch_prob
5045 || optimize == 0 || seen_error ());
5050 expand_builtin_trap (void)
5054 emit_insn (gen_trap ());
5057 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5061 /* Expand a call to __builtin_unreachable. We do nothing except emit
5062 a barrier saying that control flow will not pass here.
5064 It is the responsibility of the program being compiled to ensure
5065 that control flow does never reach __builtin_unreachable. */
5067 expand_builtin_unreachable (void)
5072 /* Expand EXP, a call to fabs, fabsf or fabsl.
5073 Return NULL_RTX if a normal call should be emitted rather than expanding
5074 the function inline. If convenient, the result should be placed
5075 in TARGET. SUBTARGET may be used as the target for computing
5079 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5081 enum machine_mode mode;
5085 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5088 arg = CALL_EXPR_ARG (exp, 0);
5089 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5090 mode = TYPE_MODE (TREE_TYPE (arg));
5091 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5092 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5095 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5096 Return NULL is a normal call should be emitted rather than expanding the
5097 function inline. If convenient, the result should be placed in TARGET.
5098 SUBTARGET may be used as the target for computing the operand. */
5101 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5106 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5109 arg = CALL_EXPR_ARG (exp, 0);
5110 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5112 arg = CALL_EXPR_ARG (exp, 1);
5113 op1 = expand_normal (arg);
5115 return expand_copysign (op0, op1, target);
5118 /* Create a new constant string literal and return a char* pointer to it.
5119 The STRING_CST value is the LEN characters at STR. */
5121 build_string_literal (int len, const char *str)
5123 tree t, elem, index, type;
5125 t = build_string (len, str);
5126 elem = build_type_variant (char_type_node, 1, 0);
5127 index = build_index_type (size_int (len - 1));
5128 type = build_array_type (elem, index);
5129 TREE_TYPE (t) = type;
5130 TREE_CONSTANT (t) = 1;
5131 TREE_READONLY (t) = 1;
5132 TREE_STATIC (t) = 1;
5134 type = build_pointer_type (elem);
5135 t = build1 (ADDR_EXPR, type,
5136 build4 (ARRAY_REF, elem,
5137 t, integer_zero_node, NULL_TREE, NULL_TREE));
5141 /* Expand a call to either the entry or exit function profiler. */
5144 expand_builtin_profile_func (bool exitp)
5146 rtx this_rtx, which;
5148 this_rtx = DECL_RTL (current_function_decl);
5149 gcc_assert (MEM_P (this_rtx));
5150 this_rtx = XEXP (this_rtx, 0);
5153 which = profile_function_exit_libfunc;
5155 which = profile_function_entry_libfunc;
5157 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5158 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5165 /* Expand a call to __builtin___clear_cache. */
5168 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5170 #ifndef HAVE_clear_cache
5171 #ifdef CLEAR_INSN_CACHE
5172 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5173 does something. Just do the default expansion to a call to
5177 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5178 does nothing. There is no need to call it. Do nothing. */
5180 #endif /* CLEAR_INSN_CACHE */
5182 /* We have a "clear_cache" insn, and it will handle everything. */
5184 rtx begin_rtx, end_rtx;
5185 enum insn_code icode;
5187 /* We must not expand to a library call. If we did, any
5188 fallback library function in libgcc that might contain a call to
5189 __builtin___clear_cache() would recurse infinitely. */
5190 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5192 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5196 if (HAVE_clear_cache)
5198 icode = CODE_FOR_clear_cache;
5200 begin = CALL_EXPR_ARG (exp, 0);
5201 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5202 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5203 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5204 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5206 end = CALL_EXPR_ARG (exp, 1);
5207 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5208 end_rtx = convert_memory_address (Pmode, end_rtx);
5209 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5210 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5212 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5215 #endif /* HAVE_clear_cache */
5218 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5221 round_trampoline_addr (rtx tramp)
5223 rtx temp, addend, mask;
5225 /* If we don't need too much alignment, we'll have been guaranteed
5226 proper alignment by get_trampoline_type. */
5227 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5230 /* Round address up to desired boundary. */
5231 temp = gen_reg_rtx (Pmode);
5232 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5233 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5235 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5236 temp, 0, OPTAB_LIB_WIDEN);
5237 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5238 temp, 0, OPTAB_LIB_WIDEN);
5244 expand_builtin_init_trampoline (tree exp)
5246 tree t_tramp, t_func, t_chain;
5247 rtx m_tramp, r_tramp, r_chain, tmp;
5249 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5250 POINTER_TYPE, VOID_TYPE))
5253 t_tramp = CALL_EXPR_ARG (exp, 0);
5254 t_func = CALL_EXPR_ARG (exp, 1);
5255 t_chain = CALL_EXPR_ARG (exp, 2);
5257 r_tramp = expand_normal (t_tramp);
5258 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5259 MEM_NOTRAP_P (m_tramp) = 1;
5261 /* The TRAMP argument should be the address of a field within the
5262 local function's FRAME decl. Let's see if we can fill in the
5263 to fill in the MEM_ATTRs for this memory. */
5264 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5265 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
5268 tmp = round_trampoline_addr (r_tramp);
5271 m_tramp = change_address (m_tramp, BLKmode, tmp);
5272 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5273 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5276 /* The FUNC argument should be the address of the nested function.
5277 Extract the actual function decl to pass to the hook. */
5278 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5279 t_func = TREE_OPERAND (t_func, 0);
5280 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5282 r_chain = expand_normal (t_chain);
5284 /* Generate insns to initialize the trampoline. */
5285 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5287 trampolines_created = 1;
5289 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5290 "trampoline generated for nested function %qD", t_func);
5296 expand_builtin_adjust_trampoline (tree exp)
5300 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5303 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5304 tramp = round_trampoline_addr (tramp);
5305 if (targetm.calls.trampoline_adjust_address)
5306 tramp = targetm.calls.trampoline_adjust_address (tramp);
5311 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5312 function. The function first checks whether the back end provides
5313 an insn to implement signbit for the respective mode. If not, it
5314 checks whether the floating point format of the value is such that
5315 the sign bit can be extracted. If that is not the case, the
5316 function returns NULL_RTX to indicate that a normal call should be
5317 emitted rather than expanding the function in-line. EXP is the
5318 expression that is a call to the builtin function; if convenient,
5319 the result should be placed in TARGET. */
5321 expand_builtin_signbit (tree exp, rtx target)
5323 const struct real_format *fmt;
5324 enum machine_mode fmode, imode, rmode;
5327 enum insn_code icode;
5329 location_t loc = EXPR_LOCATION (exp);
5331 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5334 arg = CALL_EXPR_ARG (exp, 0);
5335 fmode = TYPE_MODE (TREE_TYPE (arg));
5336 rmode = TYPE_MODE (TREE_TYPE (exp));
5337 fmt = REAL_MODE_FORMAT (fmode);
5339 arg = builtin_save_expr (arg);
5341 /* Expand the argument yielding a RTX expression. */
5342 temp = expand_normal (arg);
5344 /* Check if the back end provides an insn that handles signbit for the
5346 icode = optab_handler (signbit_optab, fmode);
5347 if (icode != CODE_FOR_nothing)
5349 rtx last = get_last_insn ();
5350 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5351 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5353 delete_insns_since (last);
5356 /* For floating point formats without a sign bit, implement signbit
5358 bitpos = fmt->signbit_ro;
5361 /* But we can't do this if the format supports signed zero. */
5362 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5365 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5366 build_real (TREE_TYPE (arg), dconst0));
5367 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5370 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5372 imode = int_mode_for_mode (fmode);
5373 if (imode == BLKmode)
5375 temp = gen_lowpart (imode, temp);
5380 /* Handle targets with different FP word orders. */
5381 if (FLOAT_WORDS_BIG_ENDIAN)
5382 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5384 word = bitpos / BITS_PER_WORD;
5385 temp = operand_subword_force (temp, word, fmode);
5386 bitpos = bitpos % BITS_PER_WORD;
5389 /* Force the intermediate word_mode (or narrower) result into a
5390 register. This avoids attempting to create paradoxical SUBREGs
5391 of floating point modes below. */
5392 temp = force_reg (imode, temp);
5394 /* If the bitpos is within the "result mode" lowpart, the operation
5395 can be implement with a single bitwise AND. Otherwise, we need
5396 a right shift and an AND. */
5398 if (bitpos < GET_MODE_BITSIZE (rmode))
5400 double_int mask = double_int_setbit (double_int_zero, bitpos);
5402 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5403 temp = gen_lowpart (rmode, temp);
5404 temp = expand_binop (rmode, and_optab, temp,
5405 immed_double_int_const (mask, rmode),
5406 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5410 /* Perform a logical right shift to place the signbit in the least
5411 significant bit, then truncate the result to the desired mode
5412 and mask just this bit. */
5413 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5414 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5415 temp = gen_lowpart (rmode, temp);
5416 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5417 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5423 /* Expand fork or exec calls. TARGET is the desired target of the
5424 call. EXP is the call. FN is the
5425 identificator of the actual function. IGNORE is nonzero if the
5426 value is to be ignored. */
5429 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5434 /* If we are not profiling, just call the function. */
5435 if (!profile_arc_flag)
5438 /* Otherwise call the wrapper. This should be equivalent for the rest of
5439 compiler, so the code does not diverge, and the wrapper may run the
5440 code necessary for keeping the profiling sane. */
5442 switch (DECL_FUNCTION_CODE (fn))
5445 id = get_identifier ("__gcov_fork");
5448 case BUILT_IN_EXECL:
5449 id = get_identifier ("__gcov_execl");
5452 case BUILT_IN_EXECV:
5453 id = get_identifier ("__gcov_execv");
5456 case BUILT_IN_EXECLP:
5457 id = get_identifier ("__gcov_execlp");
5460 case BUILT_IN_EXECLE:
5461 id = get_identifier ("__gcov_execle");
5464 case BUILT_IN_EXECVP:
5465 id = get_identifier ("__gcov_execvp");
5468 case BUILT_IN_EXECVE:
5469 id = get_identifier ("__gcov_execve");
5476 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5477 FUNCTION_DECL, id, TREE_TYPE (fn));
5478 DECL_EXTERNAL (decl) = 1;
5479 TREE_PUBLIC (decl) = 1;
5480 DECL_ARTIFICIAL (decl) = 1;
5481 TREE_NOTHROW (decl) = 1;
5482 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5483 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5484 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5485 return expand_call (call, target, ignore);
5490 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5491 the pointer in these functions is void*, the tree optimizers may remove
5492 casts. The mode computed in expand_builtin isn't reliable either, due
5493 to __sync_bool_compare_and_swap.
5495 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5496 group of builtins. This gives us log2 of the mode size. */
5498 static inline enum machine_mode
5499 get_builtin_sync_mode (int fcode_diff)
5501 /* The size is not negotiable, so ask not to get BLKmode in return
5502 if the target indicates that a smaller size would be better. */
5503 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5506 /* Expand the memory expression LOC and return the appropriate memory operand
5507 for the builtin_sync operations. */
5510 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5514 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5515 addr = convert_memory_address (Pmode, addr);
5517 /* Note that we explicitly do not want any alias information for this
5518 memory, so that we kill all other live memories. Otherwise we don't
5519 satisfy the full barrier semantics of the intrinsic. */
5520 mem = validize_mem (gen_rtx_MEM (mode, addr));
5522 /* The alignment needs to be at least according to that of the mode. */
5523 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5524 get_pointer_alignment (loc, BIGGEST_ALIGNMENT)));
5525 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5526 MEM_VOLATILE_P (mem) = 1;
5531 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5532 EXP is the CALL_EXPR. CODE is the rtx code
5533 that corresponds to the arithmetic or logical operation from the name;
5534 an exception here is that NOT actually means NAND. TARGET is an optional
5535 place for us to store the results; AFTER is true if this is the
5536 fetch_and_xxx form. IGNORE is true if we don't actually care about
5537 the result of the operation at all. */
5540 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5541 enum rtx_code code, bool after,
5542 rtx target, bool ignore)
5545 enum machine_mode old_mode;
5546 location_t loc = EXPR_LOCATION (exp);
5548 if (code == NOT && warn_sync_nand)
5550 tree fndecl = get_callee_fndecl (exp);
5551 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5553 static bool warned_f_a_n, warned_n_a_f;
5557 case BUILT_IN_FETCH_AND_NAND_1:
5558 case BUILT_IN_FETCH_AND_NAND_2:
5559 case BUILT_IN_FETCH_AND_NAND_4:
5560 case BUILT_IN_FETCH_AND_NAND_8:
5561 case BUILT_IN_FETCH_AND_NAND_16:
5566 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5567 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5568 warned_f_a_n = true;
5571 case BUILT_IN_NAND_AND_FETCH_1:
5572 case BUILT_IN_NAND_AND_FETCH_2:
5573 case BUILT_IN_NAND_AND_FETCH_4:
5574 case BUILT_IN_NAND_AND_FETCH_8:
5575 case BUILT_IN_NAND_AND_FETCH_16:
5580 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5581 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5582 warned_n_a_f = true;
5590 /* Expand the operands. */
5591 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5593 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5594 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5595 of CONST_INTs, where we know the old_mode only from the call argument. */
5596 old_mode = GET_MODE (val);
5597 if (old_mode == VOIDmode)
5598 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5599 val = convert_modes (mode, old_mode, val, 1);
5602 return expand_sync_operation (mem, val, code);
5604 return expand_sync_fetch_operation (mem, val, code, after, target);
5607 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5608 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5609 true if this is the boolean form. TARGET is a place for us to store the
5610 results; this is NOT optional if IS_BOOL is true. */
5613 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5614 bool is_bool, rtx target)
5616 rtx old_val, new_val, mem;
5617 enum machine_mode old_mode;
5619 /* Expand the operands. */
5620 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5623 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5624 mode, EXPAND_NORMAL);
5625 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5626 of CONST_INTs, where we know the old_mode only from the call argument. */
5627 old_mode = GET_MODE (old_val);
5628 if (old_mode == VOIDmode)
5629 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5630 old_val = convert_modes (mode, old_mode, old_val, 1);
5632 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5633 mode, EXPAND_NORMAL);
5634 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5635 of CONST_INTs, where we know the old_mode only from the call argument. */
5636 old_mode = GET_MODE (new_val);
5637 if (old_mode == VOIDmode)
5638 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5639 new_val = convert_modes (mode, old_mode, new_val, 1);
5642 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5644 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5647 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5648 general form is actually an atomic exchange, and some targets only
5649 support a reduced form with the second argument being a constant 1.
5650 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5654 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5658 enum machine_mode old_mode;
5660 /* Expand the operands. */
5661 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5662 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5663 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5664 of CONST_INTs, where we know the old_mode only from the call argument. */
5665 old_mode = GET_MODE (val);
5666 if (old_mode == VOIDmode)
5667 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5668 val = convert_modes (mode, old_mode, val, 1);
5670 return expand_sync_lock_test_and_set (mem, val, target);
5673 /* Expand the __sync_synchronize intrinsic. */
5676 expand_builtin_synchronize (void)
5679 VEC (tree, gc) *v_clobbers;
5681 #ifdef HAVE_memory_barrier
5682 if (HAVE_memory_barrier)
5684 emit_insn (gen_memory_barrier ());
5689 if (synchronize_libfunc != NULL_RTX)
5691 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5695 /* If no explicit memory barrier instruction is available, create an
5696 empty asm stmt with a memory clobber. */
5697 v_clobbers = VEC_alloc (tree, gc, 1);
5698 VEC_quick_push (tree, v_clobbers,
5699 tree_cons (NULL, build_string (6, "memory"), NULL));
5700 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5701 gimple_asm_set_volatile (x, true);
5702 expand_asm_stmt (x);
5705 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5708 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5710 enum insn_code icode;
5712 rtx val = const0_rtx;
5714 /* Expand the operands. */
5715 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5717 /* If there is an explicit operation in the md file, use it. */
5718 icode = direct_optab_handler (sync_lock_release_optab, mode);
5719 if (icode != CODE_FOR_nothing)
5721 if (!insn_data[icode].operand[1].predicate (val, mode))
5722 val = force_reg (mode, val);
5724 insn = GEN_FCN (icode) (mem, val);
5732 /* Otherwise we can implement this operation by emitting a barrier
5733 followed by a store of zero. */
5734 expand_builtin_synchronize ();
5735 emit_move_insn (mem, val);
5738 /* Expand an expression EXP that calls a built-in function,
5739 with result going to TARGET if that's convenient
5740 (and in mode MODE if that's convenient).
5741 SUBTARGET may be used as the target for computing one of EXP's operands.
5742 IGNORE is nonzero if the value is to be ignored. */
5745 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5748 tree fndecl = get_callee_fndecl (exp);
5749 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5750 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5752 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5753 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5755 /* When not optimizing, generate calls to library functions for a certain
5758 && !called_as_built_in (fndecl)
5759 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5760 && fcode != BUILT_IN_ALLOCA
5761 && fcode != BUILT_IN_FREE)
5762 return expand_call (exp, target, ignore);
5764 /* The built-in function expanders test for target == const0_rtx
5765 to determine whether the function's result will be ignored. */
5767 target = const0_rtx;
5769 /* If the result of a pure or const built-in function is ignored, and
5770 none of its arguments are volatile, we can avoid expanding the
5771 built-in call and just evaluate the arguments for side-effects. */
5772 if (target == const0_rtx
5773 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5775 bool volatilep = false;
5777 call_expr_arg_iterator iter;
5779 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5780 if (TREE_THIS_VOLATILE (arg))
5788 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5789 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5796 CASE_FLT_FN (BUILT_IN_FABS):
5797 target = expand_builtin_fabs (exp, target, subtarget);
5802 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5803 target = expand_builtin_copysign (exp, target, subtarget);
5808 /* Just do a normal library call if we were unable to fold
5810 CASE_FLT_FN (BUILT_IN_CABS):
5813 CASE_FLT_FN (BUILT_IN_EXP):
5814 CASE_FLT_FN (BUILT_IN_EXP10):
5815 CASE_FLT_FN (BUILT_IN_POW10):
5816 CASE_FLT_FN (BUILT_IN_EXP2):
5817 CASE_FLT_FN (BUILT_IN_EXPM1):
5818 CASE_FLT_FN (BUILT_IN_LOGB):
5819 CASE_FLT_FN (BUILT_IN_LOG):
5820 CASE_FLT_FN (BUILT_IN_LOG10):
5821 CASE_FLT_FN (BUILT_IN_LOG2):
5822 CASE_FLT_FN (BUILT_IN_LOG1P):
5823 CASE_FLT_FN (BUILT_IN_TAN):
5824 CASE_FLT_FN (BUILT_IN_ASIN):
5825 CASE_FLT_FN (BUILT_IN_ACOS):
5826 CASE_FLT_FN (BUILT_IN_ATAN):
5827 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5828 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5829 because of possible accuracy problems. */
5830 if (! flag_unsafe_math_optimizations)
5832 CASE_FLT_FN (BUILT_IN_SQRT):
5833 CASE_FLT_FN (BUILT_IN_FLOOR):
5834 CASE_FLT_FN (BUILT_IN_CEIL):
5835 CASE_FLT_FN (BUILT_IN_TRUNC):
5836 CASE_FLT_FN (BUILT_IN_ROUND):
5837 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5838 CASE_FLT_FN (BUILT_IN_RINT):
5839 target = expand_builtin_mathfn (exp, target, subtarget);
5844 CASE_FLT_FN (BUILT_IN_ILOGB):
5845 if (! flag_unsafe_math_optimizations)
5847 CASE_FLT_FN (BUILT_IN_ISINF):
5848 CASE_FLT_FN (BUILT_IN_FINITE):
5849 case BUILT_IN_ISFINITE:
5850 case BUILT_IN_ISNORMAL:
5851 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5856 CASE_FLT_FN (BUILT_IN_LCEIL):
5857 CASE_FLT_FN (BUILT_IN_LLCEIL):
5858 CASE_FLT_FN (BUILT_IN_LFLOOR):
5859 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5860 target = expand_builtin_int_roundingfn (exp, target);
5865 CASE_FLT_FN (BUILT_IN_LRINT):
5866 CASE_FLT_FN (BUILT_IN_LLRINT):
5867 CASE_FLT_FN (BUILT_IN_LROUND):
5868 CASE_FLT_FN (BUILT_IN_LLROUND):
5869 target = expand_builtin_int_roundingfn_2 (exp, target);
5874 CASE_FLT_FN (BUILT_IN_POW):
5875 target = expand_builtin_pow (exp, target, subtarget);
5880 CASE_FLT_FN (BUILT_IN_POWI):
5881 target = expand_builtin_powi (exp, target, subtarget);
5886 CASE_FLT_FN (BUILT_IN_ATAN2):
5887 CASE_FLT_FN (BUILT_IN_LDEXP):
5888 CASE_FLT_FN (BUILT_IN_SCALB):
5889 CASE_FLT_FN (BUILT_IN_SCALBN):
5890 CASE_FLT_FN (BUILT_IN_SCALBLN):
5891 if (! flag_unsafe_math_optimizations)
5894 CASE_FLT_FN (BUILT_IN_FMOD):
5895 CASE_FLT_FN (BUILT_IN_REMAINDER):
5896 CASE_FLT_FN (BUILT_IN_DREM):
5897 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5902 CASE_FLT_FN (BUILT_IN_CEXPI):
5903 target = expand_builtin_cexpi (exp, target, subtarget);
5904 gcc_assert (target);
5907 CASE_FLT_FN (BUILT_IN_SIN):
5908 CASE_FLT_FN (BUILT_IN_COS):
5909 if (! flag_unsafe_math_optimizations)
5911 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5916 CASE_FLT_FN (BUILT_IN_SINCOS):
5917 if (! flag_unsafe_math_optimizations)
5919 target = expand_builtin_sincos (exp);
5924 case BUILT_IN_APPLY_ARGS:
5925 return expand_builtin_apply_args ();
5927 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5928 FUNCTION with a copy of the parameters described by
5929 ARGUMENTS, and ARGSIZE. It returns a block of memory
5930 allocated on the stack into which is stored all the registers
5931 that might possibly be used for returning the result of a
5932 function. ARGUMENTS is the value returned by
5933 __builtin_apply_args. ARGSIZE is the number of bytes of
5934 arguments that must be copied. ??? How should this value be
5935 computed? We'll also need a safe worst case value for varargs
5937 case BUILT_IN_APPLY:
5938 if (!validate_arglist (exp, POINTER_TYPE,
5939 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5940 && !validate_arglist (exp, REFERENCE_TYPE,
5941 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5947 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5948 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5949 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5951 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5954 /* __builtin_return (RESULT) causes the function to return the
5955 value described by RESULT. RESULT is address of the block of
5956 memory returned by __builtin_apply. */
5957 case BUILT_IN_RETURN:
5958 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5959 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5962 case BUILT_IN_SAVEREGS:
5963 return expand_builtin_saveregs ();
5965 case BUILT_IN_VA_ARG_PACK:
5966 /* All valid uses of __builtin_va_arg_pack () are removed during
5968 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5971 case BUILT_IN_VA_ARG_PACK_LEN:
5972 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5974 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5977 /* Return the address of the first anonymous stack arg. */
5978 case BUILT_IN_NEXT_ARG:
5979 if (fold_builtin_next_arg (exp, false))
5981 return expand_builtin_next_arg ();
5983 case BUILT_IN_CLEAR_CACHE:
5984 target = expand_builtin___clear_cache (exp);
5989 case BUILT_IN_CLASSIFY_TYPE:
5990 return expand_builtin_classify_type (exp);
5992 case BUILT_IN_CONSTANT_P:
5995 case BUILT_IN_FRAME_ADDRESS:
5996 case BUILT_IN_RETURN_ADDRESS:
5997 return expand_builtin_frame_address (fndecl, exp);
5999 /* Returns the address of the area where the structure is returned.
6001 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6002 if (call_expr_nargs (exp) != 0
6003 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6004 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6007 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6009 case BUILT_IN_ALLOCA:
6010 target = expand_builtin_alloca (exp, target);
6015 case BUILT_IN_STACK_SAVE:
6016 return expand_stack_save ();
6018 case BUILT_IN_STACK_RESTORE:
6019 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6022 case BUILT_IN_BSWAP32:
6023 case BUILT_IN_BSWAP64:
6024 target = expand_builtin_bswap (exp, target, subtarget);
6030 CASE_INT_FN (BUILT_IN_FFS):
6031 case BUILT_IN_FFSIMAX:
6032 target = expand_builtin_unop (target_mode, exp, target,
6033 subtarget, ffs_optab);
6038 CASE_INT_FN (BUILT_IN_CLZ):
6039 case BUILT_IN_CLZIMAX:
6040 target = expand_builtin_unop (target_mode, exp, target,
6041 subtarget, clz_optab);
6046 CASE_INT_FN (BUILT_IN_CTZ):
6047 case BUILT_IN_CTZIMAX:
6048 target = expand_builtin_unop (target_mode, exp, target,
6049 subtarget, ctz_optab);
6054 CASE_INT_FN (BUILT_IN_POPCOUNT):
6055 case BUILT_IN_POPCOUNTIMAX:
6056 target = expand_builtin_unop (target_mode, exp, target,
6057 subtarget, popcount_optab);
6062 CASE_INT_FN (BUILT_IN_PARITY):
6063 case BUILT_IN_PARITYIMAX:
6064 target = expand_builtin_unop (target_mode, exp, target,
6065 subtarget, parity_optab);
6070 case BUILT_IN_STRLEN:
6071 target = expand_builtin_strlen (exp, target, target_mode);
6076 case BUILT_IN_STRCPY:
6077 target = expand_builtin_strcpy (exp, target);
6082 case BUILT_IN_STRNCPY:
6083 target = expand_builtin_strncpy (exp, target);
6088 case BUILT_IN_STPCPY:
6089 target = expand_builtin_stpcpy (exp, target, mode);
6094 case BUILT_IN_MEMCPY:
6095 target = expand_builtin_memcpy (exp, target);
6100 case BUILT_IN_MEMPCPY:
6101 target = expand_builtin_mempcpy (exp, target, mode);
6106 case BUILT_IN_MEMSET:
6107 target = expand_builtin_memset (exp, target, mode);
6112 case BUILT_IN_BZERO:
6113 target = expand_builtin_bzero (exp);
6118 case BUILT_IN_STRCMP:
6119 target = expand_builtin_strcmp (exp, target);
6124 case BUILT_IN_STRNCMP:
6125 target = expand_builtin_strncmp (exp, target, mode);
6131 case BUILT_IN_MEMCMP:
6132 target = expand_builtin_memcmp (exp, target, mode);
6137 case BUILT_IN_SETJMP:
6138 /* This should have been lowered to the builtins below. */
6141 case BUILT_IN_SETJMP_SETUP:
6142 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6143 and the receiver label. */
6144 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6146 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6147 VOIDmode, EXPAND_NORMAL);
6148 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6149 rtx label_r = label_rtx (label);
6151 /* This is copied from the handling of non-local gotos. */
6152 expand_builtin_setjmp_setup (buf_addr, label_r);
6153 nonlocal_goto_handler_labels
6154 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6155 nonlocal_goto_handler_labels);
6156 /* ??? Do not let expand_label treat us as such since we would
6157 not want to be both on the list of non-local labels and on
6158 the list of forced labels. */
6159 FORCED_LABEL (label) = 0;
6164 case BUILT_IN_SETJMP_DISPATCHER:
6165 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6166 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6168 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6169 rtx label_r = label_rtx (label);
6171 /* Remove the dispatcher label from the list of non-local labels
6172 since the receiver labels have been added to it above. */
6173 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6178 case BUILT_IN_SETJMP_RECEIVER:
6179 /* __builtin_setjmp_receiver is passed the receiver label. */
6180 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6182 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6183 rtx label_r = label_rtx (label);
6185 expand_builtin_setjmp_receiver (label_r);
6190 /* __builtin_longjmp is passed a pointer to an array of five words.
6191 It's similar to the C library longjmp function but works with
6192 __builtin_setjmp above. */
6193 case BUILT_IN_LONGJMP:
6194 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6196 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6197 VOIDmode, EXPAND_NORMAL);
6198 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6200 if (value != const1_rtx)
6202 error ("%<__builtin_longjmp%> second argument must be 1");
6206 expand_builtin_longjmp (buf_addr, value);
6211 case BUILT_IN_NONLOCAL_GOTO:
6212 target = expand_builtin_nonlocal_goto (exp);
6217 /* This updates the setjmp buffer that is its argument with the value
6218 of the current stack pointer. */
6219 case BUILT_IN_UPDATE_SETJMP_BUF:
6220 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6223 = expand_normal (CALL_EXPR_ARG (exp, 0));
6225 expand_builtin_update_setjmp_buf (buf_addr);
6231 expand_builtin_trap ();
6234 case BUILT_IN_UNREACHABLE:
6235 expand_builtin_unreachable ();
6238 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6239 case BUILT_IN_SIGNBITD32:
6240 case BUILT_IN_SIGNBITD64:
6241 case BUILT_IN_SIGNBITD128:
6242 target = expand_builtin_signbit (exp, target);
6247 /* Various hooks for the DWARF 2 __throw routine. */
6248 case BUILT_IN_UNWIND_INIT:
6249 expand_builtin_unwind_init ();
6251 case BUILT_IN_DWARF_CFA:
6252 return virtual_cfa_rtx;
6253 #ifdef DWARF2_UNWIND_INFO
6254 case BUILT_IN_DWARF_SP_COLUMN:
6255 return expand_builtin_dwarf_sp_column ();
6256 case BUILT_IN_INIT_DWARF_REG_SIZES:
6257 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6260 case BUILT_IN_FROB_RETURN_ADDR:
6261 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6262 case BUILT_IN_EXTRACT_RETURN_ADDR:
6263 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6264 case BUILT_IN_EH_RETURN:
6265 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6266 CALL_EXPR_ARG (exp, 1));
6268 #ifdef EH_RETURN_DATA_REGNO
6269 case BUILT_IN_EH_RETURN_DATA_REGNO:
6270 return expand_builtin_eh_return_data_regno (exp);
6272 case BUILT_IN_EXTEND_POINTER:
6273 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6274 case BUILT_IN_EH_POINTER:
6275 return expand_builtin_eh_pointer (exp);
6276 case BUILT_IN_EH_FILTER:
6277 return expand_builtin_eh_filter (exp);
6278 case BUILT_IN_EH_COPY_VALUES:
6279 return expand_builtin_eh_copy_values (exp);
6281 case BUILT_IN_VA_START:
6282 return expand_builtin_va_start (exp);
6283 case BUILT_IN_VA_END:
6284 return expand_builtin_va_end (exp);
6285 case BUILT_IN_VA_COPY:
6286 return expand_builtin_va_copy (exp);
6287 case BUILT_IN_EXPECT:
6288 return expand_builtin_expect (exp, target);
6289 case BUILT_IN_PREFETCH:
6290 expand_builtin_prefetch (exp);
6293 case BUILT_IN_PROFILE_FUNC_ENTER:
6294 return expand_builtin_profile_func (false);
6295 case BUILT_IN_PROFILE_FUNC_EXIT:
6296 return expand_builtin_profile_func (true);
6298 case BUILT_IN_INIT_TRAMPOLINE:
6299 return expand_builtin_init_trampoline (exp);
6300 case BUILT_IN_ADJUST_TRAMPOLINE:
6301 return expand_builtin_adjust_trampoline (exp);
6304 case BUILT_IN_EXECL:
6305 case BUILT_IN_EXECV:
6306 case BUILT_IN_EXECLP:
6307 case BUILT_IN_EXECLE:
6308 case BUILT_IN_EXECVP:
6309 case BUILT_IN_EXECVE:
6310 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6315 case BUILT_IN_FETCH_AND_ADD_1:
6316 case BUILT_IN_FETCH_AND_ADD_2:
6317 case BUILT_IN_FETCH_AND_ADD_4:
6318 case BUILT_IN_FETCH_AND_ADD_8:
6319 case BUILT_IN_FETCH_AND_ADD_16:
6320 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6321 target = expand_builtin_sync_operation (mode, exp, PLUS,
6322 false, target, ignore);
6327 case BUILT_IN_FETCH_AND_SUB_1:
6328 case BUILT_IN_FETCH_AND_SUB_2:
6329 case BUILT_IN_FETCH_AND_SUB_4:
6330 case BUILT_IN_FETCH_AND_SUB_8:
6331 case BUILT_IN_FETCH_AND_SUB_16:
6332 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6333 target = expand_builtin_sync_operation (mode, exp, MINUS,
6334 false, target, ignore);
6339 case BUILT_IN_FETCH_AND_OR_1:
6340 case BUILT_IN_FETCH_AND_OR_2:
6341 case BUILT_IN_FETCH_AND_OR_4:
6342 case BUILT_IN_FETCH_AND_OR_8:
6343 case BUILT_IN_FETCH_AND_OR_16:
6344 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6345 target = expand_builtin_sync_operation (mode, exp, IOR,
6346 false, target, ignore);
6351 case BUILT_IN_FETCH_AND_AND_1:
6352 case BUILT_IN_FETCH_AND_AND_2:
6353 case BUILT_IN_FETCH_AND_AND_4:
6354 case BUILT_IN_FETCH_AND_AND_8:
6355 case BUILT_IN_FETCH_AND_AND_16:
6356 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6357 target = expand_builtin_sync_operation (mode, exp, AND,
6358 false, target, ignore);
6363 case BUILT_IN_FETCH_AND_XOR_1:
6364 case BUILT_IN_FETCH_AND_XOR_2:
6365 case BUILT_IN_FETCH_AND_XOR_4:
6366 case BUILT_IN_FETCH_AND_XOR_8:
6367 case BUILT_IN_FETCH_AND_XOR_16:
6368 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6369 target = expand_builtin_sync_operation (mode, exp, XOR,
6370 false, target, ignore);
6375 case BUILT_IN_FETCH_AND_NAND_1:
6376 case BUILT_IN_FETCH_AND_NAND_2:
6377 case BUILT_IN_FETCH_AND_NAND_4:
6378 case BUILT_IN_FETCH_AND_NAND_8:
6379 case BUILT_IN_FETCH_AND_NAND_16:
6380 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6381 target = expand_builtin_sync_operation (mode, exp, NOT,
6382 false, target, ignore);
6387 case BUILT_IN_ADD_AND_FETCH_1:
6388 case BUILT_IN_ADD_AND_FETCH_2:
6389 case BUILT_IN_ADD_AND_FETCH_4:
6390 case BUILT_IN_ADD_AND_FETCH_8:
6391 case BUILT_IN_ADD_AND_FETCH_16:
6392 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6393 target = expand_builtin_sync_operation (mode, exp, PLUS,
6394 true, target, ignore);
6399 case BUILT_IN_SUB_AND_FETCH_1:
6400 case BUILT_IN_SUB_AND_FETCH_2:
6401 case BUILT_IN_SUB_AND_FETCH_4:
6402 case BUILT_IN_SUB_AND_FETCH_8:
6403 case BUILT_IN_SUB_AND_FETCH_16:
6404 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6405 target = expand_builtin_sync_operation (mode, exp, MINUS,
6406 true, target, ignore);
6411 case BUILT_IN_OR_AND_FETCH_1:
6412 case BUILT_IN_OR_AND_FETCH_2:
6413 case BUILT_IN_OR_AND_FETCH_4:
6414 case BUILT_IN_OR_AND_FETCH_8:
6415 case BUILT_IN_OR_AND_FETCH_16:
6416 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6417 target = expand_builtin_sync_operation (mode, exp, IOR,
6418 true, target, ignore);
6423 case BUILT_IN_AND_AND_FETCH_1:
6424 case BUILT_IN_AND_AND_FETCH_2:
6425 case BUILT_IN_AND_AND_FETCH_4:
6426 case BUILT_IN_AND_AND_FETCH_8:
6427 case BUILT_IN_AND_AND_FETCH_16:
6428 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6429 target = expand_builtin_sync_operation (mode, exp, AND,
6430 true, target, ignore);
6435 case BUILT_IN_XOR_AND_FETCH_1:
6436 case BUILT_IN_XOR_AND_FETCH_2:
6437 case BUILT_IN_XOR_AND_FETCH_4:
6438 case BUILT_IN_XOR_AND_FETCH_8:
6439 case BUILT_IN_XOR_AND_FETCH_16:
6440 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6441 target = expand_builtin_sync_operation (mode, exp, XOR,
6442 true, target, ignore);
6447 case BUILT_IN_NAND_AND_FETCH_1:
6448 case BUILT_IN_NAND_AND_FETCH_2:
6449 case BUILT_IN_NAND_AND_FETCH_4:
6450 case BUILT_IN_NAND_AND_FETCH_8:
6451 case BUILT_IN_NAND_AND_FETCH_16:
6452 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6453 target = expand_builtin_sync_operation (mode, exp, NOT,
6454 true, target, ignore);
6459 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6460 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6461 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6462 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6463 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6464 if (mode == VOIDmode)
6465 mode = TYPE_MODE (boolean_type_node);
6466 if (!target || !register_operand (target, mode))
6467 target = gen_reg_rtx (mode);
6469 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6470 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6475 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6476 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6477 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6478 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6479 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6480 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6481 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6486 case BUILT_IN_LOCK_TEST_AND_SET_1:
6487 case BUILT_IN_LOCK_TEST_AND_SET_2:
6488 case BUILT_IN_LOCK_TEST_AND_SET_4:
6489 case BUILT_IN_LOCK_TEST_AND_SET_8:
6490 case BUILT_IN_LOCK_TEST_AND_SET_16:
6491 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6492 target = expand_builtin_lock_test_and_set (mode, exp, target);
6497 case BUILT_IN_LOCK_RELEASE_1:
6498 case BUILT_IN_LOCK_RELEASE_2:
6499 case BUILT_IN_LOCK_RELEASE_4:
6500 case BUILT_IN_LOCK_RELEASE_8:
6501 case BUILT_IN_LOCK_RELEASE_16:
6502 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6503 expand_builtin_lock_release (mode, exp);
6506 case BUILT_IN_SYNCHRONIZE:
6507 expand_builtin_synchronize ();
6510 case BUILT_IN_OBJECT_SIZE:
6511 return expand_builtin_object_size (exp);
6513 case BUILT_IN_MEMCPY_CHK:
6514 case BUILT_IN_MEMPCPY_CHK:
6515 case BUILT_IN_MEMMOVE_CHK:
6516 case BUILT_IN_MEMSET_CHK:
6517 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6522 case BUILT_IN_STRCPY_CHK:
6523 case BUILT_IN_STPCPY_CHK:
6524 case BUILT_IN_STRNCPY_CHK:
6525 case BUILT_IN_STRCAT_CHK:
6526 case BUILT_IN_STRNCAT_CHK:
6527 case BUILT_IN_SNPRINTF_CHK:
6528 case BUILT_IN_VSNPRINTF_CHK:
6529 maybe_emit_chk_warning (exp, fcode);
6532 case BUILT_IN_SPRINTF_CHK:
6533 case BUILT_IN_VSPRINTF_CHK:
6534 maybe_emit_sprintf_chk_warning (exp, fcode);
6538 maybe_emit_free_warning (exp);
6541 default: /* just do library call, if unknown builtin */
6545 /* The switch statement above can drop through to cause the function
6546 to be called normally. */
6547 return expand_call (exp, target, ignore);
6550 /* Determine whether a tree node represents a call to a built-in
6551 function. If the tree T is a call to a built-in function with
6552 the right number of arguments of the appropriate types, return
6553 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6554 Otherwise the return value is END_BUILTINS. */
6556 enum built_in_function
6557 builtin_mathfn_code (const_tree t)
6559 const_tree fndecl, arg, parmlist;
6560 const_tree argtype, parmtype;
6561 const_call_expr_arg_iterator iter;
6563 if (TREE_CODE (t) != CALL_EXPR
6564 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6565 return END_BUILTINS;
6567 fndecl = get_callee_fndecl (t);
6568 if (fndecl == NULL_TREE
6569 || TREE_CODE (fndecl) != FUNCTION_DECL
6570 || ! DECL_BUILT_IN (fndecl)
6571 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6572 return END_BUILTINS;
6574 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6575 init_const_call_expr_arg_iterator (t, &iter);
6576 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6578 /* If a function doesn't take a variable number of arguments,
6579 the last element in the list will have type `void'. */
6580 parmtype = TREE_VALUE (parmlist);
6581 if (VOID_TYPE_P (parmtype))
6583 if (more_const_call_expr_args_p (&iter))
6584 return END_BUILTINS;
6585 return DECL_FUNCTION_CODE (fndecl);
6588 if (! more_const_call_expr_args_p (&iter))
6589 return END_BUILTINS;
6591 arg = next_const_call_expr_arg (&iter);
6592 argtype = TREE_TYPE (arg);
6594 if (SCALAR_FLOAT_TYPE_P (parmtype))
6596 if (! SCALAR_FLOAT_TYPE_P (argtype))
6597 return END_BUILTINS;
6599 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6601 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6602 return END_BUILTINS;
6604 else if (POINTER_TYPE_P (parmtype))
6606 if (! POINTER_TYPE_P (argtype))
6607 return END_BUILTINS;
6609 else if (INTEGRAL_TYPE_P (parmtype))
6611 if (! INTEGRAL_TYPE_P (argtype))
6612 return END_BUILTINS;
6615 return END_BUILTINS;
6618 /* Variable-length argument list. */
6619 return DECL_FUNCTION_CODE (fndecl);
6622 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6623 evaluate to a constant. */
6626 fold_builtin_constant_p (tree arg)
6628 /* We return 1 for a numeric type that's known to be a constant
6629 value at compile-time or for an aggregate type that's a
6630 literal constant. */
6633 /* If we know this is a constant, emit the constant of one. */
6634 if (CONSTANT_CLASS_P (arg)
6635 || (TREE_CODE (arg) == CONSTRUCTOR
6636 && TREE_CONSTANT (arg)))
6637 return integer_one_node;
6638 if (TREE_CODE (arg) == ADDR_EXPR)
6640 tree op = TREE_OPERAND (arg, 0);
6641 if (TREE_CODE (op) == STRING_CST
6642 || (TREE_CODE (op) == ARRAY_REF
6643 && integer_zerop (TREE_OPERAND (op, 1))
6644 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6645 return integer_one_node;
6648 /* If this expression has side effects, show we don't know it to be a
6649 constant. Likewise if it's a pointer or aggregate type since in
6650 those case we only want literals, since those are only optimized
6651 when generating RTL, not later.
6652 And finally, if we are compiling an initializer, not code, we
6653 need to return a definite result now; there's not going to be any
6654 more optimization done. */
6655 if (TREE_SIDE_EFFECTS (arg)
6656 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6657 || POINTER_TYPE_P (TREE_TYPE (arg))
6659 || folding_initializer)
6660 return integer_zero_node;
6665 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6666 return it as a truthvalue. */
6669 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6671 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6673 fn = built_in_decls[BUILT_IN_EXPECT];
6674 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6675 ret_type = TREE_TYPE (TREE_TYPE (fn));
6676 pred_type = TREE_VALUE (arg_types);
6677 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6679 pred = fold_convert_loc (loc, pred_type, pred);
6680 expected = fold_convert_loc (loc, expected_type, expected);
6681 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6683 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6684 build_int_cst (ret_type, 0));
6687 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6688 NULL_TREE if no simplification is possible. */
6691 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6694 enum tree_code code;
6696 /* If this is a builtin_expect within a builtin_expect keep the
6697 inner one. See through a comparison against a constant. It
6698 might have been added to create a thruthvalue. */
6700 if (COMPARISON_CLASS_P (inner)
6701 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6702 inner = TREE_OPERAND (inner, 0);
6704 if (TREE_CODE (inner) == CALL_EXPR
6705 && (fndecl = get_callee_fndecl (inner))
6706 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6707 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6710 /* Distribute the expected value over short-circuiting operators.
6711 See through the cast from truthvalue_type_node to long. */
6713 while (TREE_CODE (inner) == NOP_EXPR
6714 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6715 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6716 inner = TREE_OPERAND (inner, 0);
6718 code = TREE_CODE (inner);
6719 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6721 tree op0 = TREE_OPERAND (inner, 0);
6722 tree op1 = TREE_OPERAND (inner, 1);
6724 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6725 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6726 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6728 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6731 /* If the argument isn't invariant then there's nothing else we can do. */
6732 if (!TREE_CONSTANT (arg0))
6735 /* If we expect that a comparison against the argument will fold to
6736 a constant return the constant. In practice, this means a true
6737 constant or the address of a non-weak symbol. */
6740 if (TREE_CODE (inner) == ADDR_EXPR)
6744 inner = TREE_OPERAND (inner, 0);
6746 while (TREE_CODE (inner) == COMPONENT_REF
6747 || TREE_CODE (inner) == ARRAY_REF);
6748 if ((TREE_CODE (inner) == VAR_DECL
6749 || TREE_CODE (inner) == FUNCTION_DECL)
6750 && DECL_WEAK (inner))
6754 /* Otherwise, ARG0 already has the proper type for the return value. */
6758 /* Fold a call to __builtin_classify_type with argument ARG. */
6761 fold_builtin_classify_type (tree arg)
6764 return build_int_cst (NULL_TREE, no_type_class);
6766 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6769 /* Fold a call to __builtin_strlen with argument ARG. */
6772 fold_builtin_strlen (location_t loc, tree type, tree arg)
6774 if (!validate_arg (arg, POINTER_TYPE))
6778 tree len = c_strlen (arg, 0);
6781 return fold_convert_loc (loc, type, len);
6787 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6790 fold_builtin_inf (location_t loc, tree type, int warn)
6792 REAL_VALUE_TYPE real;
6794 /* __builtin_inff is intended to be usable to define INFINITY on all
6795 targets. If an infinity is not available, INFINITY expands "to a
6796 positive constant of type float that overflows at translation
6797 time", footnote "In this case, using INFINITY will violate the
6798 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6799 Thus we pedwarn to ensure this constraint violation is
6801 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6802 pedwarn (loc, 0, "target format does not support infinity");
6805 return build_real (type, real);
6808 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6811 fold_builtin_nan (tree arg, tree type, int quiet)
6813 REAL_VALUE_TYPE real;
6816 if (!validate_arg (arg, POINTER_TYPE))
6818 str = c_getstr (arg);
6822 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6825 return build_real (type, real);
6828 /* Return true if the floating point expression T has an integer value.
6829 We also allow +Inf, -Inf and NaN to be considered integer values. */
6832 integer_valued_real_p (tree t)
6834 switch (TREE_CODE (t))
6841 return integer_valued_real_p (TREE_OPERAND (t, 0));
6846 return integer_valued_real_p (TREE_OPERAND (t, 1));
6853 return integer_valued_real_p (TREE_OPERAND (t, 0))
6854 && integer_valued_real_p (TREE_OPERAND (t, 1));
6857 return integer_valued_real_p (TREE_OPERAND (t, 1))
6858 && integer_valued_real_p (TREE_OPERAND (t, 2));
6861 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6865 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6866 if (TREE_CODE (type) == INTEGER_TYPE)
6868 if (TREE_CODE (type) == REAL_TYPE)
6869 return integer_valued_real_p (TREE_OPERAND (t, 0));
6874 switch (builtin_mathfn_code (t))
6876 CASE_FLT_FN (BUILT_IN_CEIL):
6877 CASE_FLT_FN (BUILT_IN_FLOOR):
6878 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6879 CASE_FLT_FN (BUILT_IN_RINT):
6880 CASE_FLT_FN (BUILT_IN_ROUND):
6881 CASE_FLT_FN (BUILT_IN_TRUNC):
6884 CASE_FLT_FN (BUILT_IN_FMIN):
6885 CASE_FLT_FN (BUILT_IN_FMAX):
6886 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6887 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6900 /* FNDECL is assumed to be a builtin where truncation can be propagated
6901 across (for instance floor((double)f) == (double)floorf (f).
6902 Do the transformation for a call with argument ARG. */
6905 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6907 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6909 if (!validate_arg (arg, REAL_TYPE))
6912 /* Integer rounding functions are idempotent. */
6913 if (fcode == builtin_mathfn_code (arg))
6916 /* If argument is already integer valued, and we don't need to worry
6917 about setting errno, there's no need to perform rounding. */
6918 if (! flag_errno_math && integer_valued_real_p (arg))
6923 tree arg0 = strip_float_extensions (arg);
6924 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6925 tree newtype = TREE_TYPE (arg0);
6928 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6929 && (decl = mathfn_built_in (newtype, fcode)))
6930 return fold_convert_loc (loc, ftype,
6931 build_call_expr_loc (loc, decl, 1,
6932 fold_convert_loc (loc,
6939 /* FNDECL is assumed to be builtin which can narrow the FP type of
6940 the argument, for instance lround((double)f) -> lroundf (f).
6941 Do the transformation for a call with argument ARG. */
6944 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6946 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6948 if (!validate_arg (arg, REAL_TYPE))
6951 /* If argument is already integer valued, and we don't need to worry
6952 about setting errno, there's no need to perform rounding. */
6953 if (! flag_errno_math && integer_valued_real_p (arg))
6954 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6955 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6959 tree ftype = TREE_TYPE (arg);
6960 tree arg0 = strip_float_extensions (arg);
6961 tree newtype = TREE_TYPE (arg0);
6964 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6965 && (decl = mathfn_built_in (newtype, fcode)))
6966 return build_call_expr_loc (loc, decl, 1,
6967 fold_convert_loc (loc, newtype, arg0));
6970 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6971 sizeof (long long) == sizeof (long). */
6972 if (TYPE_PRECISION (long_long_integer_type_node)
6973 == TYPE_PRECISION (long_integer_type_node))
6975 tree newfn = NULL_TREE;
6978 CASE_FLT_FN (BUILT_IN_LLCEIL):
6979 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6982 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6983 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6986 CASE_FLT_FN (BUILT_IN_LLROUND):
6987 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6990 CASE_FLT_FN (BUILT_IN_LLRINT):
6991 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7000 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7001 return fold_convert_loc (loc,
7002 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7009 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7010 return type. Return NULL_TREE if no simplification can be made. */
7013 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7017 if (!validate_arg (arg, COMPLEX_TYPE)
7018 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7021 /* Calculate the result when the argument is a constant. */
7022 if (TREE_CODE (arg) == COMPLEX_CST
7023 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7027 if (TREE_CODE (arg) == COMPLEX_EXPR)
7029 tree real = TREE_OPERAND (arg, 0);
7030 tree imag = TREE_OPERAND (arg, 1);
7032 /* If either part is zero, cabs is fabs of the other. */
7033 if (real_zerop (real))
7034 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7035 if (real_zerop (imag))
7036 return fold_build1_loc (loc, ABS_EXPR, type, real);
7038 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7039 if (flag_unsafe_math_optimizations
7040 && operand_equal_p (real, imag, OEP_PURE_SAME))
7042 const REAL_VALUE_TYPE sqrt2_trunc
7043 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7045 return fold_build2_loc (loc, MULT_EXPR, type,
7046 fold_build1_loc (loc, ABS_EXPR, type, real),
7047 build_real (type, sqrt2_trunc));
7051 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7052 if (TREE_CODE (arg) == NEGATE_EXPR
7053 || TREE_CODE (arg) == CONJ_EXPR)
7054 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7056 /* Don't do this when optimizing for size. */
7057 if (flag_unsafe_math_optimizations
7058 && optimize && optimize_function_for_speed_p (cfun))
7060 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7062 if (sqrtfn != NULL_TREE)
7064 tree rpart, ipart, result;
7066 arg = builtin_save_expr (arg);
7068 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7069 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7071 rpart = builtin_save_expr (rpart);
7072 ipart = builtin_save_expr (ipart);
7074 result = fold_build2_loc (loc, PLUS_EXPR, type,
7075 fold_build2_loc (loc, MULT_EXPR, type,
7077 fold_build2_loc (loc, MULT_EXPR, type,
7080 return build_call_expr_loc (loc, sqrtfn, 1, result);
7087 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7088 complex tree type of the result. If NEG is true, the imaginary
7089 zero is negative. */
7092 build_complex_cproj (tree type, bool neg)
7094 REAL_VALUE_TYPE rinf, rzero = dconst0;
7098 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7099 build_real (TREE_TYPE (type), rzero));
7102 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7103 return type. Return NULL_TREE if no simplification can be made. */
7106 fold_builtin_cproj (location_t loc, tree arg, tree type)
7108 if (!validate_arg (arg, COMPLEX_TYPE)
7109 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7112 /* If there are no infinities, return arg. */
7113 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7114 return non_lvalue_loc (loc, arg);
7116 /* Calculate the result when the argument is a constant. */
7117 if (TREE_CODE (arg) == COMPLEX_CST)
7119 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7120 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7122 if (real_isinf (real) || real_isinf (imag))
7123 return build_complex_cproj (type, imag->sign);
7127 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7129 tree real = TREE_OPERAND (arg, 0);
7130 tree imag = TREE_OPERAND (arg, 1);
7135 /* If the real part is inf and the imag part is known to be
7136 nonnegative, return (inf + 0i). Remember side-effects are
7137 possible in the imag part. */
7138 if (TREE_CODE (real) == REAL_CST
7139 && real_isinf (TREE_REAL_CST_PTR (real))
7140 && tree_expr_nonnegative_p (imag))
7141 return omit_one_operand_loc (loc, type,
7142 build_complex_cproj (type, false),
7145 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7146 Remember side-effects are possible in the real part. */
7147 if (TREE_CODE (imag) == REAL_CST
7148 && real_isinf (TREE_REAL_CST_PTR (imag)))
7150 omit_one_operand_loc (loc, type,
7151 build_complex_cproj (type, TREE_REAL_CST_PTR
7152 (imag)->sign), arg);
7158 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7159 Return NULL_TREE if no simplification can be made. */
7162 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7165 enum built_in_function fcode;
7168 if (!validate_arg (arg, REAL_TYPE))
7171 /* Calculate the result when the argument is a constant. */
7172 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7175 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7176 fcode = builtin_mathfn_code (arg);
7177 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7179 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7180 arg = fold_build2_loc (loc, MULT_EXPR, type,
7181 CALL_EXPR_ARG (arg, 0),
7182 build_real (type, dconsthalf));
7183 return build_call_expr_loc (loc, expfn, 1, arg);
7186 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7187 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7189 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7193 tree arg0 = CALL_EXPR_ARG (arg, 0);
7195 /* The inner root was either sqrt or cbrt. */
7196 /* This was a conditional expression but it triggered a bug
7198 REAL_VALUE_TYPE dconstroot;
7199 if (BUILTIN_SQRT_P (fcode))
7200 dconstroot = dconsthalf;
7202 dconstroot = dconst_third ();
7204 /* Adjust for the outer root. */
7205 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7206 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7207 tree_root = build_real (type, dconstroot);
7208 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7212 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7213 if (flag_unsafe_math_optimizations
7214 && (fcode == BUILT_IN_POW
7215 || fcode == BUILT_IN_POWF
7216 || fcode == BUILT_IN_POWL))
7218 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7219 tree arg0 = CALL_EXPR_ARG (arg, 0);
7220 tree arg1 = CALL_EXPR_ARG (arg, 1);
7222 if (!tree_expr_nonnegative_p (arg0))
7223 arg0 = build1 (ABS_EXPR, type, arg0);
7224 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7225 build_real (type, dconsthalf));
7226 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7232 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7233 Return NULL_TREE if no simplification can be made. */
7236 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7238 const enum built_in_function fcode = builtin_mathfn_code (arg);
7241 if (!validate_arg (arg, REAL_TYPE))
7244 /* Calculate the result when the argument is a constant. */
7245 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7248 if (flag_unsafe_math_optimizations)
7250 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7251 if (BUILTIN_EXPONENT_P (fcode))
7253 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7254 const REAL_VALUE_TYPE third_trunc =
7255 real_value_truncate (TYPE_MODE (type), dconst_third ());
7256 arg = fold_build2_loc (loc, MULT_EXPR, type,
7257 CALL_EXPR_ARG (arg, 0),
7258 build_real (type, third_trunc));
7259 return build_call_expr_loc (loc, expfn, 1, arg);
7262 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7263 if (BUILTIN_SQRT_P (fcode))
7265 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7269 tree arg0 = CALL_EXPR_ARG (arg, 0);
7271 REAL_VALUE_TYPE dconstroot = dconst_third ();
7273 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7274 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7275 tree_root = build_real (type, dconstroot);
7276 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7280 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7281 if (BUILTIN_CBRT_P (fcode))
7283 tree arg0 = CALL_EXPR_ARG (arg, 0);
7284 if (tree_expr_nonnegative_p (arg0))
7286 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7291 REAL_VALUE_TYPE dconstroot;
7293 real_arithmetic (&dconstroot, MULT_EXPR,
7294 dconst_third_ptr (), dconst_third_ptr ());
7295 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7296 tree_root = build_real (type, dconstroot);
7297 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7302 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7303 if (fcode == BUILT_IN_POW
7304 || fcode == BUILT_IN_POWF
7305 || fcode == BUILT_IN_POWL)
7307 tree arg00 = CALL_EXPR_ARG (arg, 0);
7308 tree arg01 = CALL_EXPR_ARG (arg, 1);
7309 if (tree_expr_nonnegative_p (arg00))
7311 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7312 const REAL_VALUE_TYPE dconstroot
7313 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7314 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7315 build_real (type, dconstroot));
7316 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7323 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7324 TYPE is the type of the return value. Return NULL_TREE if no
7325 simplification can be made. */
7328 fold_builtin_cos (location_t loc,
7329 tree arg, tree type, tree fndecl)
7333 if (!validate_arg (arg, REAL_TYPE))
7336 /* Calculate the result when the argument is a constant. */
7337 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7340 /* Optimize cos(-x) into cos (x). */
7341 if ((narg = fold_strip_sign_ops (arg)))
7342 return build_call_expr_loc (loc, fndecl, 1, narg);
7347 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7348 Return NULL_TREE if no simplification can be made. */
7351 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7353 if (validate_arg (arg, REAL_TYPE))
7357 /* Calculate the result when the argument is a constant. */
7358 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7361 /* Optimize cosh(-x) into cosh (x). */
7362 if ((narg = fold_strip_sign_ops (arg)))
7363 return build_call_expr_loc (loc, fndecl, 1, narg);
7369 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7370 argument ARG. TYPE is the type of the return value. Return
7371 NULL_TREE if no simplification can be made. */
7374 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7377 if (validate_arg (arg, COMPLEX_TYPE)
7378 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7382 /* Calculate the result when the argument is a constant. */
7383 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7386 /* Optimize fn(-x) into fn(x). */
7387 if ((tmp = fold_strip_sign_ops (arg)))
7388 return build_call_expr_loc (loc, fndecl, 1, tmp);
7394 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7395 Return NULL_TREE if no simplification can be made. */
7398 fold_builtin_tan (tree arg, tree type)
7400 enum built_in_function fcode;
7403 if (!validate_arg (arg, REAL_TYPE))
7406 /* Calculate the result when the argument is a constant. */
7407 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7410 /* Optimize tan(atan(x)) = x. */
7411 fcode = builtin_mathfn_code (arg);
7412 if (flag_unsafe_math_optimizations
7413 && (fcode == BUILT_IN_ATAN
7414 || fcode == BUILT_IN_ATANF
7415 || fcode == BUILT_IN_ATANL))
7416 return CALL_EXPR_ARG (arg, 0);
7421 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7422 NULL_TREE if no simplification can be made. */
7425 fold_builtin_sincos (location_t loc,
7426 tree arg0, tree arg1, tree arg2)
7431 if (!validate_arg (arg0, REAL_TYPE)
7432 || !validate_arg (arg1, POINTER_TYPE)
7433 || !validate_arg (arg2, POINTER_TYPE))
7436 type = TREE_TYPE (arg0);
7438 /* Calculate the result when the argument is a constant. */
7439 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7442 /* Canonicalize sincos to cexpi. */
7443 if (!TARGET_C99_FUNCTIONS)
7445 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7449 call = build_call_expr_loc (loc, fn, 1, arg0);
7450 call = builtin_save_expr (call);
7452 return build2 (COMPOUND_EXPR, void_type_node,
7453 build2 (MODIFY_EXPR, void_type_node,
7454 build_fold_indirect_ref_loc (loc, arg1),
7455 build1 (IMAGPART_EXPR, type, call)),
7456 build2 (MODIFY_EXPR, void_type_node,
7457 build_fold_indirect_ref_loc (loc, arg2),
7458 build1 (REALPART_EXPR, type, call)));
7461 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7462 NULL_TREE if no simplification can be made. */
7465 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7468 tree realp, imagp, ifn;
7471 if (!validate_arg (arg0, COMPLEX_TYPE)
7472 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7475 /* Calculate the result when the argument is a constant. */
7476 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7479 rtype = TREE_TYPE (TREE_TYPE (arg0));
7481 /* In case we can figure out the real part of arg0 and it is constant zero
7483 if (!TARGET_C99_FUNCTIONS)
7485 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7489 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7490 && real_zerop (realp))
7492 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7493 return build_call_expr_loc (loc, ifn, 1, narg);
7496 /* In case we can easily decompose real and imaginary parts split cexp
7497 to exp (r) * cexpi (i). */
7498 if (flag_unsafe_math_optimizations
7501 tree rfn, rcall, icall;
7503 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7507 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7511 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7512 icall = builtin_save_expr (icall);
7513 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7514 rcall = builtin_save_expr (rcall);
7515 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7516 fold_build2_loc (loc, MULT_EXPR, rtype,
7518 fold_build1_loc (loc, REALPART_EXPR,
7520 fold_build2_loc (loc, MULT_EXPR, rtype,
7522 fold_build1_loc (loc, IMAGPART_EXPR,
7529 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7530 Return NULL_TREE if no simplification can be made. */
7533 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7535 if (!validate_arg (arg, REAL_TYPE))
7538 /* Optimize trunc of constant value. */
7539 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7541 REAL_VALUE_TYPE r, x;
7542 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7544 x = TREE_REAL_CST (arg);
7545 real_trunc (&r, TYPE_MODE (type), &x);
7546 return build_real (type, r);
7549 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7552 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7553 Return NULL_TREE if no simplification can be made. */
7556 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7558 if (!validate_arg (arg, REAL_TYPE))
7561 /* Optimize floor of constant value. */
7562 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7566 x = TREE_REAL_CST (arg);
7567 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7569 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7572 real_floor (&r, TYPE_MODE (type), &x);
7573 return build_real (type, r);
7577 /* Fold floor (x) where x is nonnegative to trunc (x). */
7578 if (tree_expr_nonnegative_p (arg))
7580 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7582 return build_call_expr_loc (loc, truncfn, 1, arg);
7585 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7588 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7589 Return NULL_TREE if no simplification can be made. */
7592 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7594 if (!validate_arg (arg, REAL_TYPE))
7597 /* Optimize ceil of constant value. */
7598 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7602 x = TREE_REAL_CST (arg);
7603 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7605 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7608 real_ceil (&r, TYPE_MODE (type), &x);
7609 return build_real (type, r);
7613 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7616 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7617 Return NULL_TREE if no simplification can be made. */
7620 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7622 if (!validate_arg (arg, REAL_TYPE))
7625 /* Optimize round of constant value. */
7626 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7630 x = TREE_REAL_CST (arg);
7631 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7633 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7636 real_round (&r, TYPE_MODE (type), &x);
7637 return build_real (type, r);
7641 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7644 /* Fold function call to builtin lround, lroundf or lroundl (or the
7645 corresponding long long versions) and other rounding functions. ARG
7646 is the argument to the call. Return NULL_TREE if no simplification
7650 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7652 if (!validate_arg (arg, REAL_TYPE))
7655 /* Optimize lround of constant value. */
7656 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7658 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7660 if (real_isfinite (&x))
7662 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7663 tree ftype = TREE_TYPE (arg);
7667 switch (DECL_FUNCTION_CODE (fndecl))
7669 CASE_FLT_FN (BUILT_IN_LFLOOR):
7670 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7671 real_floor (&r, TYPE_MODE (ftype), &x);
7674 CASE_FLT_FN (BUILT_IN_LCEIL):
7675 CASE_FLT_FN (BUILT_IN_LLCEIL):
7676 real_ceil (&r, TYPE_MODE (ftype), &x);
7679 CASE_FLT_FN (BUILT_IN_LROUND):
7680 CASE_FLT_FN (BUILT_IN_LLROUND):
7681 real_round (&r, TYPE_MODE (ftype), &x);
7688 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7689 if (double_int_fits_to_tree_p (itype, val))
7690 return double_int_to_tree (itype, val);
7694 switch (DECL_FUNCTION_CODE (fndecl))
7696 CASE_FLT_FN (BUILT_IN_LFLOOR):
7697 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7698 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7699 if (tree_expr_nonnegative_p (arg))
7700 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7701 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7706 return fold_fixed_mathfn (loc, fndecl, arg);
7709 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7710 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7711 the argument to the call. Return NULL_TREE if no simplification can
7715 fold_builtin_bitop (tree fndecl, tree arg)
7717 if (!validate_arg (arg, INTEGER_TYPE))
7720 /* Optimize for constant argument. */
7721 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7723 HOST_WIDE_INT hi, width, result;
7724 unsigned HOST_WIDE_INT lo;
7727 type = TREE_TYPE (arg);
7728 width = TYPE_PRECISION (type);
7729 lo = TREE_INT_CST_LOW (arg);
7731 /* Clear all the bits that are beyond the type's precision. */
7732 if (width > HOST_BITS_PER_WIDE_INT)
7734 hi = TREE_INT_CST_HIGH (arg);
7735 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7736 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7741 if (width < HOST_BITS_PER_WIDE_INT)
7742 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7745 switch (DECL_FUNCTION_CODE (fndecl))
7747 CASE_INT_FN (BUILT_IN_FFS):
7749 result = ffs_hwi (lo);
7751 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
7756 CASE_INT_FN (BUILT_IN_CLZ):
7758 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7760 result = width - floor_log2 (lo) - 1;
7761 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7765 CASE_INT_FN (BUILT_IN_CTZ):
7767 result = ctz_hwi (lo);
7769 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
7770 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7774 CASE_INT_FN (BUILT_IN_POPCOUNT):
7777 result++, lo &= lo - 1;
7779 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7782 CASE_INT_FN (BUILT_IN_PARITY):
7785 result++, lo &= lo - 1;
7787 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7795 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7801 /* Fold function call to builtin_bswap and the long and long long
7802 variants. Return NULL_TREE if no simplification can be made. */
7804 fold_builtin_bswap (tree fndecl, tree arg)
7806 if (! validate_arg (arg, INTEGER_TYPE))
7809 /* Optimize constant value. */
7810 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7812 HOST_WIDE_INT hi, width, r_hi = 0;
7813 unsigned HOST_WIDE_INT lo, r_lo = 0;
7816 type = TREE_TYPE (arg);
7817 width = TYPE_PRECISION (type);
7818 lo = TREE_INT_CST_LOW (arg);
7819 hi = TREE_INT_CST_HIGH (arg);
7821 switch (DECL_FUNCTION_CODE (fndecl))
7823 case BUILT_IN_BSWAP32:
7824 case BUILT_IN_BSWAP64:
7828 for (s = 0; s < width; s += 8)
7830 int d = width - s - 8;
7831 unsigned HOST_WIDE_INT byte;
7833 if (s < HOST_BITS_PER_WIDE_INT)
7834 byte = (lo >> s) & 0xff;
7836 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7838 if (d < HOST_BITS_PER_WIDE_INT)
7841 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7851 if (width < HOST_BITS_PER_WIDE_INT)
7852 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7854 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7860 /* A subroutine of fold_builtin to fold the various logarithmic
7861 functions. Return NULL_TREE if no simplification can me made.
7862 FUNC is the corresponding MPFR logarithm function. */
7865 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7866 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7868 if (validate_arg (arg, REAL_TYPE))
7870 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7872 const enum built_in_function fcode = builtin_mathfn_code (arg);
7874 /* Calculate the result when the argument is a constant. */
7875 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7878 /* Special case, optimize logN(expN(x)) = x. */
7879 if (flag_unsafe_math_optimizations
7880 && ((func == mpfr_log
7881 && (fcode == BUILT_IN_EXP
7882 || fcode == BUILT_IN_EXPF
7883 || fcode == BUILT_IN_EXPL))
7884 || (func == mpfr_log2
7885 && (fcode == BUILT_IN_EXP2
7886 || fcode == BUILT_IN_EXP2F
7887 || fcode == BUILT_IN_EXP2L))
7888 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7889 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7891 /* Optimize logN(func()) for various exponential functions. We
7892 want to determine the value "x" and the power "exponent" in
7893 order to transform logN(x**exponent) into exponent*logN(x). */
7894 if (flag_unsafe_math_optimizations)
7896 tree exponent = 0, x = 0;
7900 CASE_FLT_FN (BUILT_IN_EXP):
7901 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7902 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7904 exponent = CALL_EXPR_ARG (arg, 0);
7906 CASE_FLT_FN (BUILT_IN_EXP2):
7907 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7908 x = build_real (type, dconst2);
7909 exponent = CALL_EXPR_ARG (arg, 0);
7911 CASE_FLT_FN (BUILT_IN_EXP10):
7912 CASE_FLT_FN (BUILT_IN_POW10):
7913 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7915 REAL_VALUE_TYPE dconst10;
7916 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7917 x = build_real (type, dconst10);
7919 exponent = CALL_EXPR_ARG (arg, 0);
7921 CASE_FLT_FN (BUILT_IN_SQRT):
7922 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7923 x = CALL_EXPR_ARG (arg, 0);
7924 exponent = build_real (type, dconsthalf);
7926 CASE_FLT_FN (BUILT_IN_CBRT):
7927 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7928 x = CALL_EXPR_ARG (arg, 0);
7929 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7932 CASE_FLT_FN (BUILT_IN_POW):
7933 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7934 x = CALL_EXPR_ARG (arg, 0);
7935 exponent = CALL_EXPR_ARG (arg, 1);
7941 /* Now perform the optimization. */
7944 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7945 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7953 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7954 NULL_TREE if no simplification can be made. */
7957 fold_builtin_hypot (location_t loc, tree fndecl,
7958 tree arg0, tree arg1, tree type)
7960 tree res, narg0, narg1;
7962 if (!validate_arg (arg0, REAL_TYPE)
7963 || !validate_arg (arg1, REAL_TYPE))
7966 /* Calculate the result when the argument is a constant. */
7967 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7970 /* If either argument to hypot has a negate or abs, strip that off.
7971 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7972 narg0 = fold_strip_sign_ops (arg0);
7973 narg1 = fold_strip_sign_ops (arg1);
7976 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7977 narg1 ? narg1 : arg1);
7980 /* If either argument is zero, hypot is fabs of the other. */
7981 if (real_zerop (arg0))
7982 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7983 else if (real_zerop (arg1))
7984 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7986 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7987 if (flag_unsafe_math_optimizations
7988 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7990 const REAL_VALUE_TYPE sqrt2_trunc
7991 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7992 return fold_build2_loc (loc, MULT_EXPR, type,
7993 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7994 build_real (type, sqrt2_trunc));
8001 /* Fold a builtin function call to pow, powf, or powl. Return
8002 NULL_TREE if no simplification can be made. */
8004 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8008 if (!validate_arg (arg0, REAL_TYPE)
8009 || !validate_arg (arg1, REAL_TYPE))
8012 /* Calculate the result when the argument is a constant. */
8013 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8016 /* Optimize pow(1.0,y) = 1.0. */
8017 if (real_onep (arg0))
8018 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8020 if (TREE_CODE (arg1) == REAL_CST
8021 && !TREE_OVERFLOW (arg1))
8023 REAL_VALUE_TYPE cint;
8027 c = TREE_REAL_CST (arg1);
8029 /* Optimize pow(x,0.0) = 1.0. */
8030 if (REAL_VALUES_EQUAL (c, dconst0))
8031 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8034 /* Optimize pow(x,1.0) = x. */
8035 if (REAL_VALUES_EQUAL (c, dconst1))
8038 /* Optimize pow(x,-1.0) = 1.0/x. */
8039 if (REAL_VALUES_EQUAL (c, dconstm1))
8040 return fold_build2_loc (loc, RDIV_EXPR, type,
8041 build_real (type, dconst1), arg0);
8043 /* Optimize pow(x,0.5) = sqrt(x). */
8044 if (flag_unsafe_math_optimizations
8045 && REAL_VALUES_EQUAL (c, dconsthalf))
8047 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8049 if (sqrtfn != NULL_TREE)
8050 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8053 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8054 if (flag_unsafe_math_optimizations)
8056 const REAL_VALUE_TYPE dconstroot
8057 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8059 if (REAL_VALUES_EQUAL (c, dconstroot))
8061 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8062 if (cbrtfn != NULL_TREE)
8063 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8067 /* Check for an integer exponent. */
8068 n = real_to_integer (&c);
8069 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8070 if (real_identical (&c, &cint))
8072 /* Attempt to evaluate pow at compile-time, unless this should
8073 raise an exception. */
8074 if (TREE_CODE (arg0) == REAL_CST
8075 && !TREE_OVERFLOW (arg0)
8077 || (!flag_trapping_math && !flag_errno_math)
8078 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8083 x = TREE_REAL_CST (arg0);
8084 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8085 if (flag_unsafe_math_optimizations || !inexact)
8086 return build_real (type, x);
8089 /* Strip sign ops from even integer powers. */
8090 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8092 tree narg0 = fold_strip_sign_ops (arg0);
8094 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8099 if (flag_unsafe_math_optimizations)
8101 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8103 /* Optimize pow(expN(x),y) = expN(x*y). */
8104 if (BUILTIN_EXPONENT_P (fcode))
8106 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8107 tree arg = CALL_EXPR_ARG (arg0, 0);
8108 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8109 return build_call_expr_loc (loc, expfn, 1, arg);
8112 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8113 if (BUILTIN_SQRT_P (fcode))
8115 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8116 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8117 build_real (type, dconsthalf));
8118 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8121 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8122 if (BUILTIN_CBRT_P (fcode))
8124 tree arg = CALL_EXPR_ARG (arg0, 0);
8125 if (tree_expr_nonnegative_p (arg))
8127 const REAL_VALUE_TYPE dconstroot
8128 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8129 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8130 build_real (type, dconstroot));
8131 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8135 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8136 if (fcode == BUILT_IN_POW
8137 || fcode == BUILT_IN_POWF
8138 || fcode == BUILT_IN_POWL)
8140 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8141 if (tree_expr_nonnegative_p (arg00))
8143 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8144 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8145 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8153 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8154 Return NULL_TREE if no simplification can be made. */
8156 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8157 tree arg0, tree arg1, tree type)
8159 if (!validate_arg (arg0, REAL_TYPE)
8160 || !validate_arg (arg1, INTEGER_TYPE))
8163 /* Optimize pow(1.0,y) = 1.0. */
8164 if (real_onep (arg0))
8165 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8167 if (host_integerp (arg1, 0))
8169 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8171 /* Evaluate powi at compile-time. */
8172 if (TREE_CODE (arg0) == REAL_CST
8173 && !TREE_OVERFLOW (arg0))
8176 x = TREE_REAL_CST (arg0);
8177 real_powi (&x, TYPE_MODE (type), &x, c);
8178 return build_real (type, x);
8181 /* Optimize pow(x,0) = 1.0. */
8183 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8186 /* Optimize pow(x,1) = x. */
8190 /* Optimize pow(x,-1) = 1.0/x. */
8192 return fold_build2_loc (loc, RDIV_EXPR, type,
8193 build_real (type, dconst1), arg0);
8199 /* A subroutine of fold_builtin to fold the various exponent
8200 functions. Return NULL_TREE if no simplification can be made.
8201 FUNC is the corresponding MPFR exponent function. */
8204 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8205 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8207 if (validate_arg (arg, REAL_TYPE))
8209 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8212 /* Calculate the result when the argument is a constant. */
8213 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8216 /* Optimize expN(logN(x)) = x. */
8217 if (flag_unsafe_math_optimizations)
8219 const enum built_in_function fcode = builtin_mathfn_code (arg);
8221 if ((func == mpfr_exp
8222 && (fcode == BUILT_IN_LOG
8223 || fcode == BUILT_IN_LOGF
8224 || fcode == BUILT_IN_LOGL))
8225 || (func == mpfr_exp2
8226 && (fcode == BUILT_IN_LOG2
8227 || fcode == BUILT_IN_LOG2F
8228 || fcode == BUILT_IN_LOG2L))
8229 || (func == mpfr_exp10
8230 && (fcode == BUILT_IN_LOG10
8231 || fcode == BUILT_IN_LOG10F
8232 || fcode == BUILT_IN_LOG10L)))
8233 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8240 /* Return true if VAR is a VAR_DECL or a component thereof. */
8243 var_decl_component_p (tree var)
8246 while (handled_component_p (inner))
8247 inner = TREE_OPERAND (inner, 0);
8248 return SSA_VAR_P (inner);
8251 /* Fold function call to builtin memset. Return
8252 NULL_TREE if no simplification can be made. */
8255 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8256 tree type, bool ignore)
8258 tree var, ret, etype;
8259 unsigned HOST_WIDE_INT length, cval;
8261 if (! validate_arg (dest, POINTER_TYPE)
8262 || ! validate_arg (c, INTEGER_TYPE)
8263 || ! validate_arg (len, INTEGER_TYPE))
8266 if (! host_integerp (len, 1))
8269 /* If the LEN parameter is zero, return DEST. */
8270 if (integer_zerop (len))
8271 return omit_one_operand_loc (loc, type, dest, c);
8273 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8278 if (TREE_CODE (var) != ADDR_EXPR)
8281 var = TREE_OPERAND (var, 0);
8282 if (TREE_THIS_VOLATILE (var))
8285 etype = TREE_TYPE (var);
8286 if (TREE_CODE (etype) == ARRAY_TYPE)
8287 etype = TREE_TYPE (etype);
8289 if (!INTEGRAL_TYPE_P (etype)
8290 && !POINTER_TYPE_P (etype))
8293 if (! var_decl_component_p (var))
8296 length = tree_low_cst (len, 1);
8297 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8298 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8302 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8305 if (integer_zerop (c))
8309 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8312 cval = tree_low_cst (c, 1);
8316 cval |= (cval << 31) << 1;
8319 ret = build_int_cst_type (etype, cval);
8320 var = build_fold_indirect_ref_loc (loc,
8321 fold_convert_loc (loc,
8322 build_pointer_type (etype),
8324 ret = build2 (MODIFY_EXPR, etype, var, ret);
8328 return omit_one_operand_loc (loc, type, dest, ret);
8331 /* Fold function call to builtin memset. Return
8332 NULL_TREE if no simplification can be made. */
8335 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8337 if (! validate_arg (dest, POINTER_TYPE)
8338 || ! validate_arg (size, INTEGER_TYPE))
8344 /* New argument list transforming bzero(ptr x, int y) to
8345 memset(ptr x, int 0, size_t y). This is done this way
8346 so that if it isn't expanded inline, we fallback to
8347 calling bzero instead of memset. */
8349 return fold_builtin_memset (loc, dest, integer_zero_node,
8350 fold_convert_loc (loc, sizetype, size),
8351 void_type_node, ignore);
8354 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8355 NULL_TREE if no simplification can be made.
8356 If ENDP is 0, return DEST (like memcpy).
8357 If ENDP is 1, return DEST+LEN (like mempcpy).
8358 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8359 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8363 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8364 tree len, tree type, bool ignore, int endp)
8366 tree destvar, srcvar, expr;
8368 if (! validate_arg (dest, POINTER_TYPE)
8369 || ! validate_arg (src, POINTER_TYPE)
8370 || ! validate_arg (len, INTEGER_TYPE))
8373 /* If the LEN parameter is zero, return DEST. */
8374 if (integer_zerop (len))
8375 return omit_one_operand_loc (loc, type, dest, src);
8377 /* If SRC and DEST are the same (and not volatile), return
8378 DEST{,+LEN,+LEN-1}. */
8379 if (operand_equal_p (src, dest, 0))
8383 tree srctype, desttype;
8384 unsigned int src_align, dest_align;
8389 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8390 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8392 /* Both DEST and SRC must be pointer types.
8393 ??? This is what old code did. Is the testing for pointer types
8396 If either SRC is readonly or length is 1, we can use memcpy. */
8397 if (!dest_align || !src_align)
8399 if (readonly_data_expr (src)
8400 || (host_integerp (len, 1)
8401 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8402 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8404 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8407 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8410 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8411 if (TREE_CODE (src) == ADDR_EXPR
8412 && TREE_CODE (dest) == ADDR_EXPR)
8414 tree src_base, dest_base, fn;
8415 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8416 HOST_WIDE_INT size = -1;
8417 HOST_WIDE_INT maxsize = -1;
8419 srcvar = TREE_OPERAND (src, 0);
8420 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8422 destvar = TREE_OPERAND (dest, 0);
8423 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8425 if (host_integerp (len, 1))
8426 maxsize = tree_low_cst (len, 1);
8429 src_offset /= BITS_PER_UNIT;
8430 dest_offset /= BITS_PER_UNIT;
8431 if (SSA_VAR_P (src_base)
8432 && SSA_VAR_P (dest_base))
8434 if (operand_equal_p (src_base, dest_base, 0)
8435 && ranges_overlap_p (src_offset, maxsize,
8436 dest_offset, maxsize))
8439 else if (TREE_CODE (src_base) == MEM_REF
8440 && TREE_CODE (dest_base) == MEM_REF)
8443 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8444 TREE_OPERAND (dest_base, 0), 0))
8446 off = double_int_add (mem_ref_offset (src_base),
8447 shwi_to_double_int (src_offset));
8448 if (!double_int_fits_in_shwi_p (off))
8450 src_offset = off.low;
8451 off = double_int_add (mem_ref_offset (dest_base),
8452 shwi_to_double_int (dest_offset));
8453 if (!double_int_fits_in_shwi_p (off))
8455 dest_offset = off.low;
8456 if (ranges_overlap_p (src_offset, maxsize,
8457 dest_offset, maxsize))
8463 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8466 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8471 if (!host_integerp (len, 0))
8474 This logic lose for arguments like (type *)malloc (sizeof (type)),
8475 since we strip the casts of up to VOID return value from malloc.
8476 Perhaps we ought to inherit type from non-VOID argument here? */
8479 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8480 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8482 tree tem = TREE_OPERAND (src, 0);
8484 if (tem != TREE_OPERAND (src, 0))
8485 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8487 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8489 tree tem = TREE_OPERAND (dest, 0);
8491 if (tem != TREE_OPERAND (dest, 0))
8492 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8494 srctype = TREE_TYPE (TREE_TYPE (src));
8496 && TREE_CODE (srctype) == ARRAY_TYPE
8497 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8499 srctype = TREE_TYPE (srctype);
8501 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8503 desttype = TREE_TYPE (TREE_TYPE (dest));
8505 && TREE_CODE (desttype) == ARRAY_TYPE
8506 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8508 desttype = TREE_TYPE (desttype);
8510 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8512 if (!srctype || !desttype
8513 || TREE_ADDRESSABLE (srctype)
8514 || TREE_ADDRESSABLE (desttype)
8515 || !TYPE_SIZE_UNIT (srctype)
8516 || !TYPE_SIZE_UNIT (desttype)
8517 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8518 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST)
8521 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8522 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8523 if (dest_align < TYPE_ALIGN (desttype)
8524 || src_align < TYPE_ALIGN (srctype))
8528 dest = builtin_save_expr (dest);
8530 /* Build accesses at offset zero with a ref-all character type. */
8531 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8532 ptr_mode, true), 0);
8535 STRIP_NOPS (destvar);
8536 if (TREE_CODE (destvar) == ADDR_EXPR
8537 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8538 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8539 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8541 destvar = NULL_TREE;
8544 STRIP_NOPS (srcvar);
8545 if (TREE_CODE (srcvar) == ADDR_EXPR
8546 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8547 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8548 && (!STRICT_ALIGNMENT
8550 || src_align >= TYPE_ALIGN (desttype)))
8551 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8556 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8559 if (srcvar == NULL_TREE)
8561 if (STRICT_ALIGNMENT
8562 && src_align < TYPE_ALIGN (desttype))
8565 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8567 else if (destvar == NULL_TREE)
8569 if (STRICT_ALIGNMENT
8570 && dest_align < TYPE_ALIGN (srctype))
8573 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8576 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8582 if (endp == 0 || endp == 3)
8583 return omit_one_operand_loc (loc, type, dest, expr);
8589 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8592 len = fold_convert_loc (loc, sizetype, len);
8593 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8594 dest = fold_convert_loc (loc, type, dest);
8596 dest = omit_one_operand_loc (loc, type, dest, expr);
8600 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8601 If LEN is not NULL, it represents the length of the string to be
8602 copied. Return NULL_TREE if no simplification can be made. */
8605 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8609 if (!validate_arg (dest, POINTER_TYPE)
8610 || !validate_arg (src, POINTER_TYPE))
8613 /* If SRC and DEST are the same (and not volatile), return DEST. */
8614 if (operand_equal_p (src, dest, 0))
8615 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8617 if (optimize_function_for_size_p (cfun))
8620 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8626 len = c_strlen (src, 1);
8627 if (! len || TREE_SIDE_EFFECTS (len))
8631 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8632 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8633 build_call_expr_loc (loc, fn, 3, dest, src, len));
8636 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8637 Return NULL_TREE if no simplification can be made. */
8640 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8642 tree fn, len, lenp1, call, type;
8644 if (!validate_arg (dest, POINTER_TYPE)
8645 || !validate_arg (src, POINTER_TYPE))
8648 len = c_strlen (src, 1);
8650 || TREE_CODE (len) != INTEGER_CST)
8653 if (optimize_function_for_size_p (cfun)
8654 /* If length is zero it's small enough. */
8655 && !integer_zerop (len))
8658 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8662 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8663 /* We use dest twice in building our expression. Save it from
8664 multiple expansions. */
8665 dest = builtin_save_expr (dest);
8666 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8668 type = TREE_TYPE (TREE_TYPE (fndecl));
8669 len = fold_convert_loc (loc, sizetype, len);
8670 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8671 dest = fold_convert_loc (loc, type, dest);
8672 dest = omit_one_operand_loc (loc, type, dest, call);
8676 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8677 If SLEN is not NULL, it represents the length of the source string.
8678 Return NULL_TREE if no simplification can be made. */
8681 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8682 tree src, tree len, tree slen)
8686 if (!validate_arg (dest, POINTER_TYPE)
8687 || !validate_arg (src, POINTER_TYPE)
8688 || !validate_arg (len, INTEGER_TYPE))
8691 /* If the LEN parameter is zero, return DEST. */
8692 if (integer_zerop (len))
8693 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8695 /* We can't compare slen with len as constants below if len is not a
8697 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8701 slen = c_strlen (src, 1);
8703 /* Now, we must be passed a constant src ptr parameter. */
8704 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8707 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8709 /* We do not support simplification of this case, though we do
8710 support it when expanding trees into RTL. */
8711 /* FIXME: generate a call to __builtin_memset. */
8712 if (tree_int_cst_lt (slen, len))
8715 /* OK transform into builtin memcpy. */
8716 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8719 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8720 build_call_expr_loc (loc, fn, 3, dest, src, len));
8723 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8724 arguments to the call, and TYPE is its return type.
8725 Return NULL_TREE if no simplification can be made. */
8728 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8730 if (!validate_arg (arg1, POINTER_TYPE)
8731 || !validate_arg (arg2, INTEGER_TYPE)
8732 || !validate_arg (len, INTEGER_TYPE))
8738 if (TREE_CODE (arg2) != INTEGER_CST
8739 || !host_integerp (len, 1))
8742 p1 = c_getstr (arg1);
8743 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8749 if (target_char_cast (arg2, &c))
8752 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8755 return build_int_cst (TREE_TYPE (arg1), 0);
8757 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8759 return fold_convert_loc (loc, type, tem);
8765 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8766 Return NULL_TREE if no simplification can be made. */
8769 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8771 const char *p1, *p2;
8773 if (!validate_arg (arg1, POINTER_TYPE)
8774 || !validate_arg (arg2, POINTER_TYPE)
8775 || !validate_arg (len, INTEGER_TYPE))
8778 /* If the LEN parameter is zero, return zero. */
8779 if (integer_zerop (len))
8780 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8783 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8784 if (operand_equal_p (arg1, arg2, 0))
8785 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8787 p1 = c_getstr (arg1);
8788 p2 = c_getstr (arg2);
8790 /* If all arguments are constant, and the value of len is not greater
8791 than the lengths of arg1 and arg2, evaluate at compile-time. */
8792 if (host_integerp (len, 1) && p1 && p2
8793 && compare_tree_int (len, strlen (p1) + 1) <= 0
8794 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8796 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8799 return integer_one_node;
8801 return integer_minus_one_node;
8803 return integer_zero_node;
8806 /* If len parameter is one, return an expression corresponding to
8807 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8808 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8810 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8811 tree cst_uchar_ptr_node
8812 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8815 = fold_convert_loc (loc, integer_type_node,
8816 build1 (INDIRECT_REF, cst_uchar_node,
8817 fold_convert_loc (loc,
8821 = fold_convert_loc (loc, integer_type_node,
8822 build1 (INDIRECT_REF, cst_uchar_node,
8823 fold_convert_loc (loc,
8826 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8832 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8833 Return NULL_TREE if no simplification can be made. */
8836 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8838 const char *p1, *p2;
8840 if (!validate_arg (arg1, POINTER_TYPE)
8841 || !validate_arg (arg2, POINTER_TYPE))
8844 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8845 if (operand_equal_p (arg1, arg2, 0))
8846 return integer_zero_node;
8848 p1 = c_getstr (arg1);
8849 p2 = c_getstr (arg2);
8853 const int i = strcmp (p1, p2);
8855 return integer_minus_one_node;
8857 return integer_one_node;
8859 return integer_zero_node;
8862 /* If the second arg is "", return *(const unsigned char*)arg1. */
8863 if (p2 && *p2 == '\0')
8865 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8866 tree cst_uchar_ptr_node
8867 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8869 return fold_convert_loc (loc, integer_type_node,
8870 build1 (INDIRECT_REF, cst_uchar_node,
8871 fold_convert_loc (loc,
8876 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8877 if (p1 && *p1 == '\0')
8879 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8880 tree cst_uchar_ptr_node
8881 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8884 = fold_convert_loc (loc, integer_type_node,
8885 build1 (INDIRECT_REF, cst_uchar_node,
8886 fold_convert_loc (loc,
8889 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8895 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8896 Return NULL_TREE if no simplification can be made. */
8899 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8901 const char *p1, *p2;
8903 if (!validate_arg (arg1, POINTER_TYPE)
8904 || !validate_arg (arg2, POINTER_TYPE)
8905 || !validate_arg (len, INTEGER_TYPE))
8908 /* If the LEN parameter is zero, return zero. */
8909 if (integer_zerop (len))
8910 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8913 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8914 if (operand_equal_p (arg1, arg2, 0))
8915 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8917 p1 = c_getstr (arg1);
8918 p2 = c_getstr (arg2);
8920 if (host_integerp (len, 1) && p1 && p2)
8922 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8924 return integer_one_node;
8926 return integer_minus_one_node;
8928 return integer_zero_node;
8931 /* If the second arg is "", and the length is greater than zero,
8932 return *(const unsigned char*)arg1. */
8933 if (p2 && *p2 == '\0'
8934 && TREE_CODE (len) == INTEGER_CST
8935 && tree_int_cst_sgn (len) == 1)
8937 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8938 tree cst_uchar_ptr_node
8939 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8941 return fold_convert_loc (loc, integer_type_node,
8942 build1 (INDIRECT_REF, cst_uchar_node,
8943 fold_convert_loc (loc,
8948 /* If the first arg is "", and the length is greater than zero,
8949 return -*(const unsigned char*)arg2. */
8950 if (p1 && *p1 == '\0'
8951 && TREE_CODE (len) == INTEGER_CST
8952 && tree_int_cst_sgn (len) == 1)
8954 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8955 tree cst_uchar_ptr_node
8956 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8958 tree temp = fold_convert_loc (loc, integer_type_node,
8959 build1 (INDIRECT_REF, cst_uchar_node,
8960 fold_convert_loc (loc,
8963 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8966 /* If len parameter is one, return an expression corresponding to
8967 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8968 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8970 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8971 tree cst_uchar_ptr_node
8972 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8974 tree ind1 = fold_convert_loc (loc, integer_type_node,
8975 build1 (INDIRECT_REF, cst_uchar_node,
8976 fold_convert_loc (loc,
8979 tree ind2 = fold_convert_loc (loc, integer_type_node,
8980 build1 (INDIRECT_REF, cst_uchar_node,
8981 fold_convert_loc (loc,
8984 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8990 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8991 ARG. Return NULL_TREE if no simplification can be made. */
8994 fold_builtin_signbit (location_t loc, tree arg, tree type)
8998 if (!validate_arg (arg, REAL_TYPE))
9001 /* If ARG is a compile-time constant, determine the result. */
9002 if (TREE_CODE (arg) == REAL_CST
9003 && !TREE_OVERFLOW (arg))
9007 c = TREE_REAL_CST (arg);
9008 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9009 return fold_convert_loc (loc, type, temp);
9012 /* If ARG is non-negative, the result is always zero. */
9013 if (tree_expr_nonnegative_p (arg))
9014 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9016 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9017 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9018 return fold_build2_loc (loc, LT_EXPR, type, arg,
9019 build_real (TREE_TYPE (arg), dconst0));
9024 /* Fold function call to builtin copysign, copysignf or copysignl with
9025 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9029 fold_builtin_copysign (location_t loc, tree fndecl,
9030 tree arg1, tree arg2, tree type)
9034 if (!validate_arg (arg1, REAL_TYPE)
9035 || !validate_arg (arg2, REAL_TYPE))
9038 /* copysign(X,X) is X. */
9039 if (operand_equal_p (arg1, arg2, 0))
9040 return fold_convert_loc (loc, type, arg1);
9042 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9043 if (TREE_CODE (arg1) == REAL_CST
9044 && TREE_CODE (arg2) == REAL_CST
9045 && !TREE_OVERFLOW (arg1)
9046 && !TREE_OVERFLOW (arg2))
9048 REAL_VALUE_TYPE c1, c2;
9050 c1 = TREE_REAL_CST (arg1);
9051 c2 = TREE_REAL_CST (arg2);
9052 /* c1.sign := c2.sign. */
9053 real_copysign (&c1, &c2);
9054 return build_real (type, c1);
9057 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9058 Remember to evaluate Y for side-effects. */
9059 if (tree_expr_nonnegative_p (arg2))
9060 return omit_one_operand_loc (loc, type,
9061 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9064 /* Strip sign changing operations for the first argument. */
9065 tem = fold_strip_sign_ops (arg1);
9067 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9072 /* Fold a call to builtin isascii with argument ARG. */
9075 fold_builtin_isascii (location_t loc, tree arg)
9077 if (!validate_arg (arg, INTEGER_TYPE))
9081 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9082 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9083 build_int_cst (NULL_TREE,
9084 ~ (unsigned HOST_WIDE_INT) 0x7f));
9085 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9086 arg, integer_zero_node);
9090 /* Fold a call to builtin toascii with argument ARG. */
9093 fold_builtin_toascii (location_t loc, tree arg)
9095 if (!validate_arg (arg, INTEGER_TYPE))
9098 /* Transform toascii(c) -> (c & 0x7f). */
9099 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9100 build_int_cst (NULL_TREE, 0x7f));
9103 /* Fold a call to builtin isdigit with argument ARG. */
9106 fold_builtin_isdigit (location_t loc, tree arg)
9108 if (!validate_arg (arg, INTEGER_TYPE))
9112 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9113 /* According to the C standard, isdigit is unaffected by locale.
9114 However, it definitely is affected by the target character set. */
9115 unsigned HOST_WIDE_INT target_digit0
9116 = lang_hooks.to_target_charset ('0');
9118 if (target_digit0 == 0)
9121 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9122 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9123 build_int_cst (unsigned_type_node, target_digit0));
9124 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9125 build_int_cst (unsigned_type_node, 9));
9129 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9132 fold_builtin_fabs (location_t loc, tree arg, tree type)
9134 if (!validate_arg (arg, REAL_TYPE))
9137 arg = fold_convert_loc (loc, type, arg);
9138 if (TREE_CODE (arg) == REAL_CST)
9139 return fold_abs_const (arg, type);
9140 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9143 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9146 fold_builtin_abs (location_t loc, tree arg, tree type)
9148 if (!validate_arg (arg, INTEGER_TYPE))
9151 arg = fold_convert_loc (loc, type, arg);
9152 if (TREE_CODE (arg) == INTEGER_CST)
9153 return fold_abs_const (arg, type);
9154 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9157 /* Fold a call to builtin fmin or fmax. */
9160 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9161 tree type, bool max)
9163 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9165 /* Calculate the result when the argument is a constant. */
9166 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9171 /* If either argument is NaN, return the other one. Avoid the
9172 transformation if we get (and honor) a signalling NaN. Using
9173 omit_one_operand() ensures we create a non-lvalue. */
9174 if (TREE_CODE (arg0) == REAL_CST
9175 && real_isnan (&TREE_REAL_CST (arg0))
9176 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9177 || ! TREE_REAL_CST (arg0).signalling))
9178 return omit_one_operand_loc (loc, type, arg1, arg0);
9179 if (TREE_CODE (arg1) == REAL_CST
9180 && real_isnan (&TREE_REAL_CST (arg1))
9181 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9182 || ! TREE_REAL_CST (arg1).signalling))
9183 return omit_one_operand_loc (loc, type, arg0, arg1);
9185 /* Transform fmin/fmax(x,x) -> x. */
9186 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9187 return omit_one_operand_loc (loc, type, arg0, arg1);
9189 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9190 functions to return the numeric arg if the other one is NaN.
9191 These tree codes don't honor that, so only transform if
9192 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9193 handled, so we don't have to worry about it either. */
9194 if (flag_finite_math_only)
9195 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9196 fold_convert_loc (loc, type, arg0),
9197 fold_convert_loc (loc, type, arg1));
9202 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9205 fold_builtin_carg (location_t loc, tree arg, tree type)
9207 if (validate_arg (arg, COMPLEX_TYPE)
9208 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9210 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9214 tree new_arg = builtin_save_expr (arg);
9215 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9216 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9217 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9224 /* Fold a call to builtin logb/ilogb. */
9227 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9229 if (! validate_arg (arg, REAL_TYPE))
9234 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9236 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9242 /* If arg is Inf or NaN and we're logb, return it. */
9243 if (TREE_CODE (rettype) == REAL_TYPE)
9244 return fold_convert_loc (loc, rettype, arg);
9245 /* Fall through... */
9247 /* Zero may set errno and/or raise an exception for logb, also
9248 for ilogb we don't know FP_ILOGB0. */
9251 /* For normal numbers, proceed iff radix == 2. In GCC,
9252 normalized significands are in the range [0.5, 1.0). We
9253 want the exponent as if they were [1.0, 2.0) so get the
9254 exponent and subtract 1. */
9255 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9256 return fold_convert_loc (loc, rettype,
9257 build_int_cst (NULL_TREE,
9258 REAL_EXP (value)-1));
9266 /* Fold a call to builtin significand, if radix == 2. */
9269 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9271 if (! validate_arg (arg, REAL_TYPE))
9276 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9278 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9285 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9286 return fold_convert_loc (loc, rettype, arg);
9288 /* For normal numbers, proceed iff radix == 2. */
9289 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9291 REAL_VALUE_TYPE result = *value;
9292 /* In GCC, normalized significands are in the range [0.5,
9293 1.0). We want them to be [1.0, 2.0) so set the
9295 SET_REAL_EXP (&result, 1);
9296 return build_real (rettype, result);
9305 /* Fold a call to builtin frexp, we can assume the base is 2. */
9308 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9310 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9315 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9318 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9320 /* Proceed if a valid pointer type was passed in. */
9321 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9323 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9329 /* For +-0, return (*exp = 0, +-0). */
9330 exp = integer_zero_node;
9335 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9336 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9339 /* Since the frexp function always expects base 2, and in
9340 GCC normalized significands are already in the range
9341 [0.5, 1.0), we have exactly what frexp wants. */
9342 REAL_VALUE_TYPE frac_rvt = *value;
9343 SET_REAL_EXP (&frac_rvt, 0);
9344 frac = build_real (rettype, frac_rvt);
9345 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9352 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9353 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9354 TREE_SIDE_EFFECTS (arg1) = 1;
9355 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9361 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9362 then we can assume the base is two. If it's false, then we have to
9363 check the mode of the TYPE parameter in certain cases. */
9366 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9367 tree type, bool ldexp)
9369 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9374 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9375 if (real_zerop (arg0) || integer_zerop (arg1)
9376 || (TREE_CODE (arg0) == REAL_CST
9377 && !real_isfinite (&TREE_REAL_CST (arg0))))
9378 return omit_one_operand_loc (loc, type, arg0, arg1);
9380 /* If both arguments are constant, then try to evaluate it. */
9381 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9382 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9383 && host_integerp (arg1, 0))
9385 /* Bound the maximum adjustment to twice the range of the
9386 mode's valid exponents. Use abs to ensure the range is
9387 positive as a sanity check. */
9388 const long max_exp_adj = 2 *
9389 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9390 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9392 /* Get the user-requested adjustment. */
9393 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9395 /* The requested adjustment must be inside this range. This
9396 is a preliminary cap to avoid things like overflow, we
9397 may still fail to compute the result for other reasons. */
9398 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9400 REAL_VALUE_TYPE initial_result;
9402 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9404 /* Ensure we didn't overflow. */
9405 if (! real_isinf (&initial_result))
9407 const REAL_VALUE_TYPE trunc_result
9408 = real_value_truncate (TYPE_MODE (type), initial_result);
9410 /* Only proceed if the target mode can hold the
9412 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9413 return build_real (type, trunc_result);
9422 /* Fold a call to builtin modf. */
9425 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9427 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9432 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9435 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9437 /* Proceed if a valid pointer type was passed in. */
9438 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9440 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9441 REAL_VALUE_TYPE trunc, frac;
9447 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9448 trunc = frac = *value;
9451 /* For +-Inf, return (*arg1 = arg0, +-0). */
9453 frac.sign = value->sign;
9457 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9458 real_trunc (&trunc, VOIDmode, value);
9459 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9460 /* If the original number was negative and already
9461 integral, then the fractional part is -0.0. */
9462 if (value->sign && frac.cl == rvc_zero)
9463 frac.sign = value->sign;
9467 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9468 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9469 build_real (rettype, trunc));
9470 TREE_SIDE_EFFECTS (arg1) = 1;
9471 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9472 build_real (rettype, frac));
9478 /* Given a location LOC, an interclass builtin function decl FNDECL
9479 and its single argument ARG, return an folded expression computing
9480 the same, or NULL_TREE if we either couldn't or didn't want to fold
9481 (the latter happen if there's an RTL instruction available). */
9484 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9486 enum machine_mode mode;
9488 if (!validate_arg (arg, REAL_TYPE))
9491 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9494 mode = TYPE_MODE (TREE_TYPE (arg));
9496 /* If there is no optab, try generic code. */
9497 switch (DECL_FUNCTION_CODE (fndecl))
9501 CASE_FLT_FN (BUILT_IN_ISINF):
9503 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9504 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9505 tree const type = TREE_TYPE (arg);
9509 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9510 real_from_string (&r, buf);
9511 result = build_call_expr (isgr_fn, 2,
9512 fold_build1_loc (loc, ABS_EXPR, type, arg),
9513 build_real (type, r));
9516 CASE_FLT_FN (BUILT_IN_FINITE):
9517 case BUILT_IN_ISFINITE:
9519 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9520 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9521 tree const type = TREE_TYPE (arg);
9525 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9526 real_from_string (&r, buf);
9527 result = build_call_expr (isle_fn, 2,
9528 fold_build1_loc (loc, ABS_EXPR, type, arg),
9529 build_real (type, r));
9530 /*result = fold_build2_loc (loc, UNGT_EXPR,
9531 TREE_TYPE (TREE_TYPE (fndecl)),
9532 fold_build1_loc (loc, ABS_EXPR, type, arg),
9533 build_real (type, r));
9534 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9535 TREE_TYPE (TREE_TYPE (fndecl)),
9539 case BUILT_IN_ISNORMAL:
9541 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9542 islessequal(fabs(x),DBL_MAX). */
9543 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9544 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9545 tree const type = TREE_TYPE (arg);
9546 REAL_VALUE_TYPE rmax, rmin;
9549 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9550 real_from_string (&rmax, buf);
9551 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9552 real_from_string (&rmin, buf);
9553 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9554 result = build_call_expr (isle_fn, 2, arg,
9555 build_real (type, rmax));
9556 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9557 build_call_expr (isge_fn, 2, arg,
9558 build_real (type, rmin)));
9568 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9569 ARG is the argument for the call. */
9572 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9574 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9577 if (!validate_arg (arg, REAL_TYPE))
9580 switch (builtin_index)
9582 case BUILT_IN_ISINF:
9583 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9584 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9586 if (TREE_CODE (arg) == REAL_CST)
9588 r = TREE_REAL_CST (arg);
9589 if (real_isinf (&r))
9590 return real_compare (GT_EXPR, &r, &dconst0)
9591 ? integer_one_node : integer_minus_one_node;
9593 return integer_zero_node;
9598 case BUILT_IN_ISINF_SIGN:
9600 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9601 /* In a boolean context, GCC will fold the inner COND_EXPR to
9602 1. So e.g. "if (isinf_sign(x))" would be folded to just
9603 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9604 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9605 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9606 tree tmp = NULL_TREE;
9608 arg = builtin_save_expr (arg);
9610 if (signbit_fn && isinf_fn)
9612 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9613 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9615 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9616 signbit_call, integer_zero_node);
9617 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9618 isinf_call, integer_zero_node);
9620 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9621 integer_minus_one_node, integer_one_node);
9622 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9630 case BUILT_IN_ISFINITE:
9631 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9632 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9633 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9635 if (TREE_CODE (arg) == REAL_CST)
9637 r = TREE_REAL_CST (arg);
9638 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9643 case BUILT_IN_ISNAN:
9644 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9645 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9647 if (TREE_CODE (arg) == REAL_CST)
9649 r = TREE_REAL_CST (arg);
9650 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9653 arg = builtin_save_expr (arg);
9654 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9661 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9662 This builtin will generate code to return the appropriate floating
9663 point classification depending on the value of the floating point
9664 number passed in. The possible return values must be supplied as
9665 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9666 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9667 one floating point argument which is "type generic". */
9670 fold_builtin_fpclassify (location_t loc, tree exp)
9672 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9673 arg, type, res, tmp;
9674 enum machine_mode mode;
9678 /* Verify the required arguments in the original call. */
9679 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9680 INTEGER_TYPE, INTEGER_TYPE,
9681 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9684 fp_nan = CALL_EXPR_ARG (exp, 0);
9685 fp_infinite = CALL_EXPR_ARG (exp, 1);
9686 fp_normal = CALL_EXPR_ARG (exp, 2);
9687 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9688 fp_zero = CALL_EXPR_ARG (exp, 4);
9689 arg = CALL_EXPR_ARG (exp, 5);
9690 type = TREE_TYPE (arg);
9691 mode = TYPE_MODE (type);
9692 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9696 (fabs(x) == Inf ? FP_INFINITE :
9697 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9698 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9700 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9701 build_real (type, dconst0));
9702 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9703 tmp, fp_zero, fp_subnormal);
9705 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9706 real_from_string (&r, buf);
9707 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9708 arg, build_real (type, r));
9709 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9711 if (HONOR_INFINITIES (mode))
9714 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9715 build_real (type, r));
9716 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9720 if (HONOR_NANS (mode))
9722 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9723 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9729 /* Fold a call to an unordered comparison function such as
9730 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9731 being called and ARG0 and ARG1 are the arguments for the call.
9732 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9733 the opposite of the desired result. UNORDERED_CODE is used
9734 for modes that can hold NaNs and ORDERED_CODE is used for
9738 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9739 enum tree_code unordered_code,
9740 enum tree_code ordered_code)
9742 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9743 enum tree_code code;
9745 enum tree_code code0, code1;
9746 tree cmp_type = NULL_TREE;
9748 type0 = TREE_TYPE (arg0);
9749 type1 = TREE_TYPE (arg1);
9751 code0 = TREE_CODE (type0);
9752 code1 = TREE_CODE (type1);
9754 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9755 /* Choose the wider of two real types. */
9756 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9758 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9760 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9763 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9764 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9766 if (unordered_code == UNORDERED_EXPR)
9768 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9769 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9770 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9773 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9775 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9776 fold_build2_loc (loc, code, type, arg0, arg1));
9779 /* Fold a call to built-in function FNDECL with 0 arguments.
9780 IGNORE is true if the result of the function call is ignored. This
9781 function returns NULL_TREE if no simplification was possible. */
9784 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9786 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9787 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9790 CASE_FLT_FN (BUILT_IN_INF):
9791 case BUILT_IN_INFD32:
9792 case BUILT_IN_INFD64:
9793 case BUILT_IN_INFD128:
9794 return fold_builtin_inf (loc, type, true);
9796 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9797 return fold_builtin_inf (loc, type, false);
9799 case BUILT_IN_CLASSIFY_TYPE:
9800 return fold_builtin_classify_type (NULL_TREE);
9808 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9809 IGNORE is true if the result of the function call is ignored. This
9810 function returns NULL_TREE if no simplification was possible. */
9813 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9815 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9816 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9819 case BUILT_IN_CONSTANT_P:
9821 tree val = fold_builtin_constant_p (arg0);
9823 /* Gimplification will pull the CALL_EXPR for the builtin out of
9824 an if condition. When not optimizing, we'll not CSE it back.
9825 To avoid link error types of regressions, return false now. */
9826 if (!val && !optimize)
9827 val = integer_zero_node;
9832 case BUILT_IN_CLASSIFY_TYPE:
9833 return fold_builtin_classify_type (arg0);
9835 case BUILT_IN_STRLEN:
9836 return fold_builtin_strlen (loc, type, arg0);
9838 CASE_FLT_FN (BUILT_IN_FABS):
9839 return fold_builtin_fabs (loc, arg0, type);
9843 case BUILT_IN_LLABS:
9844 case BUILT_IN_IMAXABS:
9845 return fold_builtin_abs (loc, arg0, type);
9847 CASE_FLT_FN (BUILT_IN_CONJ):
9848 if (validate_arg (arg0, COMPLEX_TYPE)
9849 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9850 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9853 CASE_FLT_FN (BUILT_IN_CREAL):
9854 if (validate_arg (arg0, COMPLEX_TYPE)
9855 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9856 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9859 CASE_FLT_FN (BUILT_IN_CIMAG):
9860 if (validate_arg (arg0, COMPLEX_TYPE)
9861 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9862 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9865 CASE_FLT_FN (BUILT_IN_CCOS):
9866 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9868 CASE_FLT_FN (BUILT_IN_CCOSH):
9869 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9871 CASE_FLT_FN (BUILT_IN_CPROJ):
9872 return fold_builtin_cproj(loc, arg0, type);
9874 CASE_FLT_FN (BUILT_IN_CSIN):
9875 if (validate_arg (arg0, COMPLEX_TYPE)
9876 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9877 return do_mpc_arg1 (arg0, type, mpc_sin);
9880 CASE_FLT_FN (BUILT_IN_CSINH):
9881 if (validate_arg (arg0, COMPLEX_TYPE)
9882 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9883 return do_mpc_arg1 (arg0, type, mpc_sinh);
9886 CASE_FLT_FN (BUILT_IN_CTAN):
9887 if (validate_arg (arg0, COMPLEX_TYPE)
9888 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9889 return do_mpc_arg1 (arg0, type, mpc_tan);
9892 CASE_FLT_FN (BUILT_IN_CTANH):
9893 if (validate_arg (arg0, COMPLEX_TYPE)
9894 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9895 return do_mpc_arg1 (arg0, type, mpc_tanh);
9898 CASE_FLT_FN (BUILT_IN_CLOG):
9899 if (validate_arg (arg0, COMPLEX_TYPE)
9900 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9901 return do_mpc_arg1 (arg0, type, mpc_log);
9904 CASE_FLT_FN (BUILT_IN_CSQRT):
9905 if (validate_arg (arg0, COMPLEX_TYPE)
9906 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9907 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9910 CASE_FLT_FN (BUILT_IN_CASIN):
9911 if (validate_arg (arg0, COMPLEX_TYPE)
9912 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9913 return do_mpc_arg1 (arg0, type, mpc_asin);
9916 CASE_FLT_FN (BUILT_IN_CACOS):
9917 if (validate_arg (arg0, COMPLEX_TYPE)
9918 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9919 return do_mpc_arg1 (arg0, type, mpc_acos);
9922 CASE_FLT_FN (BUILT_IN_CATAN):
9923 if (validate_arg (arg0, COMPLEX_TYPE)
9924 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9925 return do_mpc_arg1 (arg0, type, mpc_atan);
9928 CASE_FLT_FN (BUILT_IN_CASINH):
9929 if (validate_arg (arg0, COMPLEX_TYPE)
9930 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9931 return do_mpc_arg1 (arg0, type, mpc_asinh);
9934 CASE_FLT_FN (BUILT_IN_CACOSH):
9935 if (validate_arg (arg0, COMPLEX_TYPE)
9936 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9937 return do_mpc_arg1 (arg0, type, mpc_acosh);
9940 CASE_FLT_FN (BUILT_IN_CATANH):
9941 if (validate_arg (arg0, COMPLEX_TYPE)
9942 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9943 return do_mpc_arg1 (arg0, type, mpc_atanh);
9946 CASE_FLT_FN (BUILT_IN_CABS):
9947 return fold_builtin_cabs (loc, arg0, type, fndecl);
9949 CASE_FLT_FN (BUILT_IN_CARG):
9950 return fold_builtin_carg (loc, arg0, type);
9952 CASE_FLT_FN (BUILT_IN_SQRT):
9953 return fold_builtin_sqrt (loc, arg0, type);
9955 CASE_FLT_FN (BUILT_IN_CBRT):
9956 return fold_builtin_cbrt (loc, arg0, type);
9958 CASE_FLT_FN (BUILT_IN_ASIN):
9959 if (validate_arg (arg0, REAL_TYPE))
9960 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9961 &dconstm1, &dconst1, true);
9964 CASE_FLT_FN (BUILT_IN_ACOS):
9965 if (validate_arg (arg0, REAL_TYPE))
9966 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9967 &dconstm1, &dconst1, true);
9970 CASE_FLT_FN (BUILT_IN_ATAN):
9971 if (validate_arg (arg0, REAL_TYPE))
9972 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9975 CASE_FLT_FN (BUILT_IN_ASINH):
9976 if (validate_arg (arg0, REAL_TYPE))
9977 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9980 CASE_FLT_FN (BUILT_IN_ACOSH):
9981 if (validate_arg (arg0, REAL_TYPE))
9982 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9983 &dconst1, NULL, true);
9986 CASE_FLT_FN (BUILT_IN_ATANH):
9987 if (validate_arg (arg0, REAL_TYPE))
9988 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9989 &dconstm1, &dconst1, false);
9992 CASE_FLT_FN (BUILT_IN_SIN):
9993 if (validate_arg (arg0, REAL_TYPE))
9994 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9997 CASE_FLT_FN (BUILT_IN_COS):
9998 return fold_builtin_cos (loc, arg0, type, fndecl);
10000 CASE_FLT_FN (BUILT_IN_TAN):
10001 return fold_builtin_tan (arg0, type);
10003 CASE_FLT_FN (BUILT_IN_CEXP):
10004 return fold_builtin_cexp (loc, arg0, type);
10006 CASE_FLT_FN (BUILT_IN_CEXPI):
10007 if (validate_arg (arg0, REAL_TYPE))
10008 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10011 CASE_FLT_FN (BUILT_IN_SINH):
10012 if (validate_arg (arg0, REAL_TYPE))
10013 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10016 CASE_FLT_FN (BUILT_IN_COSH):
10017 return fold_builtin_cosh (loc, arg0, type, fndecl);
10019 CASE_FLT_FN (BUILT_IN_TANH):
10020 if (validate_arg (arg0, REAL_TYPE))
10021 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10024 CASE_FLT_FN (BUILT_IN_ERF):
10025 if (validate_arg (arg0, REAL_TYPE))
10026 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10029 CASE_FLT_FN (BUILT_IN_ERFC):
10030 if (validate_arg (arg0, REAL_TYPE))
10031 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10034 CASE_FLT_FN (BUILT_IN_TGAMMA):
10035 if (validate_arg (arg0, REAL_TYPE))
10036 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10039 CASE_FLT_FN (BUILT_IN_EXP):
10040 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10042 CASE_FLT_FN (BUILT_IN_EXP2):
10043 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10045 CASE_FLT_FN (BUILT_IN_EXP10):
10046 CASE_FLT_FN (BUILT_IN_POW10):
10047 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10049 CASE_FLT_FN (BUILT_IN_EXPM1):
10050 if (validate_arg (arg0, REAL_TYPE))
10051 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10054 CASE_FLT_FN (BUILT_IN_LOG):
10055 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10057 CASE_FLT_FN (BUILT_IN_LOG2):
10058 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10060 CASE_FLT_FN (BUILT_IN_LOG10):
10061 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10063 CASE_FLT_FN (BUILT_IN_LOG1P):
10064 if (validate_arg (arg0, REAL_TYPE))
10065 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10066 &dconstm1, NULL, false);
10069 CASE_FLT_FN (BUILT_IN_J0):
10070 if (validate_arg (arg0, REAL_TYPE))
10071 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10075 CASE_FLT_FN (BUILT_IN_J1):
10076 if (validate_arg (arg0, REAL_TYPE))
10077 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10081 CASE_FLT_FN (BUILT_IN_Y0):
10082 if (validate_arg (arg0, REAL_TYPE))
10083 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10084 &dconst0, NULL, false);
10087 CASE_FLT_FN (BUILT_IN_Y1):
10088 if (validate_arg (arg0, REAL_TYPE))
10089 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10090 &dconst0, NULL, false);
10093 CASE_FLT_FN (BUILT_IN_NAN):
10094 case BUILT_IN_NAND32:
10095 case BUILT_IN_NAND64:
10096 case BUILT_IN_NAND128:
10097 return fold_builtin_nan (arg0, type, true);
10099 CASE_FLT_FN (BUILT_IN_NANS):
10100 return fold_builtin_nan (arg0, type, false);
10102 CASE_FLT_FN (BUILT_IN_FLOOR):
10103 return fold_builtin_floor (loc, fndecl, arg0);
10105 CASE_FLT_FN (BUILT_IN_CEIL):
10106 return fold_builtin_ceil (loc, fndecl, arg0);
10108 CASE_FLT_FN (BUILT_IN_TRUNC):
10109 return fold_builtin_trunc (loc, fndecl, arg0);
10111 CASE_FLT_FN (BUILT_IN_ROUND):
10112 return fold_builtin_round (loc, fndecl, arg0);
10114 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10115 CASE_FLT_FN (BUILT_IN_RINT):
10116 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10118 CASE_FLT_FN (BUILT_IN_LCEIL):
10119 CASE_FLT_FN (BUILT_IN_LLCEIL):
10120 CASE_FLT_FN (BUILT_IN_LFLOOR):
10121 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10122 CASE_FLT_FN (BUILT_IN_LROUND):
10123 CASE_FLT_FN (BUILT_IN_LLROUND):
10124 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10126 CASE_FLT_FN (BUILT_IN_LRINT):
10127 CASE_FLT_FN (BUILT_IN_LLRINT):
10128 return fold_fixed_mathfn (loc, fndecl, arg0);
10130 case BUILT_IN_BSWAP32:
10131 case BUILT_IN_BSWAP64:
10132 return fold_builtin_bswap (fndecl, arg0);
10134 CASE_INT_FN (BUILT_IN_FFS):
10135 CASE_INT_FN (BUILT_IN_CLZ):
10136 CASE_INT_FN (BUILT_IN_CTZ):
10137 CASE_INT_FN (BUILT_IN_POPCOUNT):
10138 CASE_INT_FN (BUILT_IN_PARITY):
10139 return fold_builtin_bitop (fndecl, arg0);
10141 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10142 return fold_builtin_signbit (loc, arg0, type);
10144 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10145 return fold_builtin_significand (loc, arg0, type);
10147 CASE_FLT_FN (BUILT_IN_ILOGB):
10148 CASE_FLT_FN (BUILT_IN_LOGB):
10149 return fold_builtin_logb (loc, arg0, type);
10151 case BUILT_IN_ISASCII:
10152 return fold_builtin_isascii (loc, arg0);
10154 case BUILT_IN_TOASCII:
10155 return fold_builtin_toascii (loc, arg0);
10157 case BUILT_IN_ISDIGIT:
10158 return fold_builtin_isdigit (loc, arg0);
10160 CASE_FLT_FN (BUILT_IN_FINITE):
10161 case BUILT_IN_FINITED32:
10162 case BUILT_IN_FINITED64:
10163 case BUILT_IN_FINITED128:
10164 case BUILT_IN_ISFINITE:
10166 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10169 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10172 CASE_FLT_FN (BUILT_IN_ISINF):
10173 case BUILT_IN_ISINFD32:
10174 case BUILT_IN_ISINFD64:
10175 case BUILT_IN_ISINFD128:
10177 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10180 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10183 case BUILT_IN_ISNORMAL:
10184 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10186 case BUILT_IN_ISINF_SIGN:
10187 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10189 CASE_FLT_FN (BUILT_IN_ISNAN):
10190 case BUILT_IN_ISNAND32:
10191 case BUILT_IN_ISNAND64:
10192 case BUILT_IN_ISNAND128:
10193 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10195 case BUILT_IN_PRINTF:
10196 case BUILT_IN_PRINTF_UNLOCKED:
10197 case BUILT_IN_VPRINTF:
10198 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10200 case BUILT_IN_FREE:
10201 if (integer_zerop (arg0))
10202 return build_empty_stmt (loc);
10213 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10214 IGNORE is true if the result of the function call is ignored. This
10215 function returns NULL_TREE if no simplification was possible. */
10218 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10220 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10221 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10225 CASE_FLT_FN (BUILT_IN_JN):
10226 if (validate_arg (arg0, INTEGER_TYPE)
10227 && validate_arg (arg1, REAL_TYPE))
10228 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10231 CASE_FLT_FN (BUILT_IN_YN):
10232 if (validate_arg (arg0, INTEGER_TYPE)
10233 && validate_arg (arg1, REAL_TYPE))
10234 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10238 CASE_FLT_FN (BUILT_IN_DREM):
10239 CASE_FLT_FN (BUILT_IN_REMAINDER):
10240 if (validate_arg (arg0, REAL_TYPE)
10241 && validate_arg(arg1, REAL_TYPE))
10242 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10245 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10246 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10247 if (validate_arg (arg0, REAL_TYPE)
10248 && validate_arg(arg1, POINTER_TYPE))
10249 return do_mpfr_lgamma_r (arg0, arg1, type);
10252 CASE_FLT_FN (BUILT_IN_ATAN2):
10253 if (validate_arg (arg0, REAL_TYPE)
10254 && validate_arg(arg1, REAL_TYPE))
10255 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10258 CASE_FLT_FN (BUILT_IN_FDIM):
10259 if (validate_arg (arg0, REAL_TYPE)
10260 && validate_arg(arg1, REAL_TYPE))
10261 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10264 CASE_FLT_FN (BUILT_IN_HYPOT):
10265 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10267 CASE_FLT_FN (BUILT_IN_CPOW):
10268 if (validate_arg (arg0, COMPLEX_TYPE)
10269 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10270 && validate_arg (arg1, COMPLEX_TYPE)
10271 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10272 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10275 CASE_FLT_FN (BUILT_IN_LDEXP):
10276 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10277 CASE_FLT_FN (BUILT_IN_SCALBN):
10278 CASE_FLT_FN (BUILT_IN_SCALBLN):
10279 return fold_builtin_load_exponent (loc, arg0, arg1,
10280 type, /*ldexp=*/false);
10282 CASE_FLT_FN (BUILT_IN_FREXP):
10283 return fold_builtin_frexp (loc, arg0, arg1, type);
10285 CASE_FLT_FN (BUILT_IN_MODF):
10286 return fold_builtin_modf (loc, arg0, arg1, type);
10288 case BUILT_IN_BZERO:
10289 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10291 case BUILT_IN_FPUTS:
10292 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10294 case BUILT_IN_FPUTS_UNLOCKED:
10295 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10297 case BUILT_IN_STRSTR:
10298 return fold_builtin_strstr (loc, arg0, arg1, type);
10300 case BUILT_IN_STRCAT:
10301 return fold_builtin_strcat (loc, arg0, arg1);
10303 case BUILT_IN_STRSPN:
10304 return fold_builtin_strspn (loc, arg0, arg1);
10306 case BUILT_IN_STRCSPN:
10307 return fold_builtin_strcspn (loc, arg0, arg1);
10309 case BUILT_IN_STRCHR:
10310 case BUILT_IN_INDEX:
10311 return fold_builtin_strchr (loc, arg0, arg1, type);
10313 case BUILT_IN_STRRCHR:
10314 case BUILT_IN_RINDEX:
10315 return fold_builtin_strrchr (loc, arg0, arg1, type);
10317 case BUILT_IN_STRCPY:
10318 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10320 case BUILT_IN_STPCPY:
10323 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10327 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10330 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10333 case BUILT_IN_STRCMP:
10334 return fold_builtin_strcmp (loc, arg0, arg1);
10336 case BUILT_IN_STRPBRK:
10337 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10339 case BUILT_IN_EXPECT:
10340 return fold_builtin_expect (loc, arg0, arg1);
10342 CASE_FLT_FN (BUILT_IN_POW):
10343 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10345 CASE_FLT_FN (BUILT_IN_POWI):
10346 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10348 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10349 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10351 CASE_FLT_FN (BUILT_IN_FMIN):
10352 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10354 CASE_FLT_FN (BUILT_IN_FMAX):
10355 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10357 case BUILT_IN_ISGREATER:
10358 return fold_builtin_unordered_cmp (loc, fndecl,
10359 arg0, arg1, UNLE_EXPR, LE_EXPR);
10360 case BUILT_IN_ISGREATEREQUAL:
10361 return fold_builtin_unordered_cmp (loc, fndecl,
10362 arg0, arg1, UNLT_EXPR, LT_EXPR);
10363 case BUILT_IN_ISLESS:
10364 return fold_builtin_unordered_cmp (loc, fndecl,
10365 arg0, arg1, UNGE_EXPR, GE_EXPR);
10366 case BUILT_IN_ISLESSEQUAL:
10367 return fold_builtin_unordered_cmp (loc, fndecl,
10368 arg0, arg1, UNGT_EXPR, GT_EXPR);
10369 case BUILT_IN_ISLESSGREATER:
10370 return fold_builtin_unordered_cmp (loc, fndecl,
10371 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10372 case BUILT_IN_ISUNORDERED:
10373 return fold_builtin_unordered_cmp (loc, fndecl,
10374 arg0, arg1, UNORDERED_EXPR,
10377 /* We do the folding for va_start in the expander. */
10378 case BUILT_IN_VA_START:
10381 case BUILT_IN_SPRINTF:
10382 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10384 case BUILT_IN_OBJECT_SIZE:
10385 return fold_builtin_object_size (arg0, arg1);
10387 case BUILT_IN_PRINTF:
10388 case BUILT_IN_PRINTF_UNLOCKED:
10389 case BUILT_IN_VPRINTF:
10390 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10392 case BUILT_IN_PRINTF_CHK:
10393 case BUILT_IN_VPRINTF_CHK:
10394 if (!validate_arg (arg0, INTEGER_TYPE)
10395 || TREE_SIDE_EFFECTS (arg0))
10398 return fold_builtin_printf (loc, fndecl,
10399 arg1, NULL_TREE, ignore, fcode);
10402 case BUILT_IN_FPRINTF:
10403 case BUILT_IN_FPRINTF_UNLOCKED:
10404 case BUILT_IN_VFPRINTF:
10405 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10414 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10415 and ARG2. IGNORE is true if the result of the function call is ignored.
10416 This function returns NULL_TREE if no simplification was possible. */
10419 fold_builtin_3 (location_t loc, tree fndecl,
10420 tree arg0, tree arg1, tree arg2, bool ignore)
10422 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10423 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10427 CASE_FLT_FN (BUILT_IN_SINCOS):
10428 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10430 CASE_FLT_FN (BUILT_IN_FMA):
10431 if (validate_arg (arg0, REAL_TYPE)
10432 && validate_arg(arg1, REAL_TYPE)
10433 && validate_arg(arg2, REAL_TYPE))
10434 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10437 CASE_FLT_FN (BUILT_IN_REMQUO):
10438 if (validate_arg (arg0, REAL_TYPE)
10439 && validate_arg(arg1, REAL_TYPE)
10440 && validate_arg(arg2, POINTER_TYPE))
10441 return do_mpfr_remquo (arg0, arg1, arg2);
10444 case BUILT_IN_MEMSET:
10445 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10447 case BUILT_IN_BCOPY:
10448 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10449 void_type_node, true, /*endp=*/3);
10451 case BUILT_IN_MEMCPY:
10452 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10453 type, ignore, /*endp=*/0);
10455 case BUILT_IN_MEMPCPY:
10456 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10457 type, ignore, /*endp=*/1);
10459 case BUILT_IN_MEMMOVE:
10460 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10461 type, ignore, /*endp=*/3);
10463 case BUILT_IN_STRNCAT:
10464 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10466 case BUILT_IN_STRNCPY:
10467 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10469 case BUILT_IN_STRNCMP:
10470 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10472 case BUILT_IN_MEMCHR:
10473 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10475 case BUILT_IN_BCMP:
10476 case BUILT_IN_MEMCMP:
10477 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10479 case BUILT_IN_SPRINTF:
10480 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10482 case BUILT_IN_STRCPY_CHK:
10483 case BUILT_IN_STPCPY_CHK:
10484 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10487 case BUILT_IN_STRCAT_CHK:
10488 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10490 case BUILT_IN_PRINTF_CHK:
10491 case BUILT_IN_VPRINTF_CHK:
10492 if (!validate_arg (arg0, INTEGER_TYPE)
10493 || TREE_SIDE_EFFECTS (arg0))
10496 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10499 case BUILT_IN_FPRINTF:
10500 case BUILT_IN_FPRINTF_UNLOCKED:
10501 case BUILT_IN_VFPRINTF:
10502 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10505 case BUILT_IN_FPRINTF_CHK:
10506 case BUILT_IN_VFPRINTF_CHK:
10507 if (!validate_arg (arg1, INTEGER_TYPE)
10508 || TREE_SIDE_EFFECTS (arg1))
10511 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10520 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10521 ARG2, and ARG3. IGNORE is true if the result of the function call is
10522 ignored. This function returns NULL_TREE if no simplification was
10526 fold_builtin_4 (location_t loc, tree fndecl,
10527 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10529 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10533 case BUILT_IN_MEMCPY_CHK:
10534 case BUILT_IN_MEMPCPY_CHK:
10535 case BUILT_IN_MEMMOVE_CHK:
10536 case BUILT_IN_MEMSET_CHK:
10537 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10539 DECL_FUNCTION_CODE (fndecl));
10541 case BUILT_IN_STRNCPY_CHK:
10542 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10544 case BUILT_IN_STRNCAT_CHK:
10545 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10547 case BUILT_IN_FPRINTF_CHK:
10548 case BUILT_IN_VFPRINTF_CHK:
10549 if (!validate_arg (arg1, INTEGER_TYPE)
10550 || TREE_SIDE_EFFECTS (arg1))
10553 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10563 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10564 arguments, where NARGS <= 4. IGNORE is true if the result of the
10565 function call is ignored. This function returns NULL_TREE if no
10566 simplification was possible. Note that this only folds builtins with
10567 fixed argument patterns. Foldings that do varargs-to-varargs
10568 transformations, or that match calls with more than 4 arguments,
10569 need to be handled with fold_builtin_varargs instead. */
10571 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10574 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10576 tree ret = NULL_TREE;
10581 ret = fold_builtin_0 (loc, fndecl, ignore);
10584 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10587 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10590 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10593 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10601 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10602 SET_EXPR_LOCATION (ret, loc);
10603 TREE_NO_WARNING (ret) = 1;
10609 /* Builtins with folding operations that operate on "..." arguments
10610 need special handling; we need to store the arguments in a convenient
10611 data structure before attempting any folding. Fortunately there are
10612 only a few builtins that fall into this category. FNDECL is the
10613 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10614 result of the function call is ignored. */
10617 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10618 bool ignore ATTRIBUTE_UNUSED)
10620 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10621 tree ret = NULL_TREE;
10625 case BUILT_IN_SPRINTF_CHK:
10626 case BUILT_IN_VSPRINTF_CHK:
10627 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10630 case BUILT_IN_SNPRINTF_CHK:
10631 case BUILT_IN_VSNPRINTF_CHK:
10632 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10635 case BUILT_IN_FPCLASSIFY:
10636 ret = fold_builtin_fpclassify (loc, exp);
10644 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10645 SET_EXPR_LOCATION (ret, loc);
10646 TREE_NO_WARNING (ret) = 1;
10652 /* Return true if FNDECL shouldn't be folded right now.
10653 If a built-in function has an inline attribute always_inline
10654 wrapper, defer folding it after always_inline functions have
10655 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10656 might not be performed. */
10659 avoid_folding_inline_builtin (tree fndecl)
10661 return (DECL_DECLARED_INLINE_P (fndecl)
10662 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10664 && !cfun->always_inline_functions_inlined
10665 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10668 /* A wrapper function for builtin folding that prevents warnings for
10669 "statement without effect" and the like, caused by removing the
10670 call node earlier than the warning is generated. */
10673 fold_call_expr (location_t loc, tree exp, bool ignore)
10675 tree ret = NULL_TREE;
10676 tree fndecl = get_callee_fndecl (exp);
10678 && TREE_CODE (fndecl) == FUNCTION_DECL
10679 && DECL_BUILT_IN (fndecl)
10680 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10681 yet. Defer folding until we see all the arguments
10682 (after inlining). */
10683 && !CALL_EXPR_VA_ARG_PACK (exp))
10685 int nargs = call_expr_nargs (exp);
10687 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10688 instead last argument is __builtin_va_arg_pack (). Defer folding
10689 even in that case, until arguments are finalized. */
10690 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10692 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10694 && TREE_CODE (fndecl2) == FUNCTION_DECL
10695 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10696 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10700 if (avoid_folding_inline_builtin (fndecl))
10703 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10704 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10705 CALL_EXPR_ARGP (exp), ignore);
10708 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10710 tree *args = CALL_EXPR_ARGP (exp);
10711 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10714 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10722 /* Conveniently construct a function call expression. FNDECL names the
10723 function to be called and N arguments are passed in the array
10727 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10729 tree fntype = TREE_TYPE (fndecl);
10730 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10732 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10735 /* Conveniently construct a function call expression. FNDECL names the
10736 function to be called and the arguments are passed in the vector
10740 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
10742 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10743 VEC_address (tree, vec));
10747 /* Conveniently construct a function call expression. FNDECL names the
10748 function to be called, N is the number of arguments, and the "..."
10749 parameters are the argument expressions. */
10752 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10755 tree *argarray = XALLOCAVEC (tree, n);
10759 for (i = 0; i < n; i++)
10760 argarray[i] = va_arg (ap, tree);
10762 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10765 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10766 varargs macros aren't supported by all bootstrap compilers. */
10769 build_call_expr (tree fndecl, int n, ...)
10772 tree *argarray = XALLOCAVEC (tree, n);
10776 for (i = 0; i < n; i++)
10777 argarray[i] = va_arg (ap, tree);
10779 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10782 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10783 N arguments are passed in the array ARGARRAY. */
10786 fold_builtin_call_array (location_t loc, tree type,
10791 tree ret = NULL_TREE;
10794 if (TREE_CODE (fn) == ADDR_EXPR)
10796 tree fndecl = TREE_OPERAND (fn, 0);
10797 if (TREE_CODE (fndecl) == FUNCTION_DECL
10798 && DECL_BUILT_IN (fndecl))
10800 /* If last argument is __builtin_va_arg_pack (), arguments to this
10801 function are not finalized yet. Defer folding until they are. */
10802 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10804 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10806 && TREE_CODE (fndecl2) == FUNCTION_DECL
10807 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10808 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10809 return build_call_array_loc (loc, type, fn, n, argarray);
10811 if (avoid_folding_inline_builtin (fndecl))
10812 return build_call_array_loc (loc, type, fn, n, argarray);
10813 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10815 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10819 return build_call_array_loc (loc, type, fn, n, argarray);
10821 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10823 /* First try the transformations that don't require consing up
10825 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10830 /* If we got this far, we need to build an exp. */
10831 exp = build_call_array_loc (loc, type, fn, n, argarray);
10832 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10833 return ret ? ret : exp;
10837 return build_call_array_loc (loc, type, fn, n, argarray);
10840 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10841 along with N new arguments specified as the "..." parameters. SKIP
10842 is the number of arguments in EXP to be omitted. This function is used
10843 to do varargs-to-varargs transformations. */
10846 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10848 int oldnargs = call_expr_nargs (exp);
10849 int nargs = oldnargs - skip + n;
10850 tree fntype = TREE_TYPE (fndecl);
10851 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10859 buffer = XALLOCAVEC (tree, nargs);
10861 for (i = 0; i < n; i++)
10862 buffer[i] = va_arg (ap, tree);
10864 for (j = skip; j < oldnargs; j++, i++)
10865 buffer[i] = CALL_EXPR_ARG (exp, j);
10868 buffer = CALL_EXPR_ARGP (exp) + skip;
10870 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
10873 /* Validate a single argument ARG against a tree code CODE representing
10877 validate_arg (const_tree arg, enum tree_code code)
10881 else if (code == POINTER_TYPE)
10882 return POINTER_TYPE_P (TREE_TYPE (arg));
10883 else if (code == INTEGER_TYPE)
10884 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10885 return code == TREE_CODE (TREE_TYPE (arg));
10888 /* This function validates the types of a function call argument list
10889 against a specified list of tree_codes. If the last specifier is a 0,
10890 that represents an ellipses, otherwise the last specifier must be a
10893 This is the GIMPLE version of validate_arglist. Eventually we want to
10894 completely convert builtins.c to work from GIMPLEs and the tree based
10895 validate_arglist will then be removed. */
10898 validate_gimple_arglist (const_gimple call, ...)
10900 enum tree_code code;
10906 va_start (ap, call);
10911 code = (enum tree_code) va_arg (ap, int);
10915 /* This signifies an ellipses, any further arguments are all ok. */
10919 /* This signifies an endlink, if no arguments remain, return
10920 true, otherwise return false. */
10921 res = (i == gimple_call_num_args (call));
10924 /* If no parameters remain or the parameter's code does not
10925 match the specified code, return false. Otherwise continue
10926 checking any remaining arguments. */
10927 arg = gimple_call_arg (call, i++);
10928 if (!validate_arg (arg, code))
10935 /* We need gotos here since we can only have one VA_CLOSE in a
10943 /* This function validates the types of a function call argument list
10944 against a specified list of tree_codes. If the last specifier is a 0,
10945 that represents an ellipses, otherwise the last specifier must be a
10949 validate_arglist (const_tree callexpr, ...)
10951 enum tree_code code;
10954 const_call_expr_arg_iterator iter;
10957 va_start (ap, callexpr);
10958 init_const_call_expr_arg_iterator (callexpr, &iter);
10962 code = (enum tree_code) va_arg (ap, int);
10966 /* This signifies an ellipses, any further arguments are all ok. */
10970 /* This signifies an endlink, if no arguments remain, return
10971 true, otherwise return false. */
10972 res = !more_const_call_expr_args_p (&iter);
10975 /* If no parameters remain or the parameter's code does not
10976 match the specified code, return false. Otherwise continue
10977 checking any remaining arguments. */
10978 arg = next_const_call_expr_arg (&iter);
10979 if (!validate_arg (arg, code))
10986 /* We need gotos here since we can only have one VA_CLOSE in a
10994 /* Default target-specific builtin expander that does nothing. */
10997 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10998 rtx target ATTRIBUTE_UNUSED,
10999 rtx subtarget ATTRIBUTE_UNUSED,
11000 enum machine_mode mode ATTRIBUTE_UNUSED,
11001 int ignore ATTRIBUTE_UNUSED)
11006 /* Returns true is EXP represents data that would potentially reside
11007 in a readonly section. */
11010 readonly_data_expr (tree exp)
11014 if (TREE_CODE (exp) != ADDR_EXPR)
11017 exp = get_base_address (TREE_OPERAND (exp, 0));
11021 /* Make sure we call decl_readonly_section only for trees it
11022 can handle (since it returns true for everything it doesn't
11024 if (TREE_CODE (exp) == STRING_CST
11025 || TREE_CODE (exp) == CONSTRUCTOR
11026 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11027 return decl_readonly_section (exp, 0);
11032 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11033 to the call, and TYPE is its return type.
11035 Return NULL_TREE if no simplification was possible, otherwise return the
11036 simplified form of the call as a tree.
11038 The simplified form may be a constant or other expression which
11039 computes the same value, but in a more efficient manner (including
11040 calls to other builtin functions).
11042 The call may contain arguments which need to be evaluated, but
11043 which are not useful to determine the result of the call. In
11044 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11045 COMPOUND_EXPR will be an argument which must be evaluated.
11046 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11047 COMPOUND_EXPR in the chain will contain the tree for the simplified
11048 form of the builtin function call. */
11051 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11053 if (!validate_arg (s1, POINTER_TYPE)
11054 || !validate_arg (s2, POINTER_TYPE))
11059 const char *p1, *p2;
11061 p2 = c_getstr (s2);
11065 p1 = c_getstr (s1);
11068 const char *r = strstr (p1, p2);
11072 return build_int_cst (TREE_TYPE (s1), 0);
11074 /* Return an offset into the constant string argument. */
11075 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11076 s1, size_int (r - p1));
11077 return fold_convert_loc (loc, type, tem);
11080 /* The argument is const char *, and the result is char *, so we need
11081 a type conversion here to avoid a warning. */
11083 return fold_convert_loc (loc, type, s1);
11088 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11092 /* New argument list transforming strstr(s1, s2) to
11093 strchr(s1, s2[0]). */
11094 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11098 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11099 the call, and TYPE is its return type.
11101 Return NULL_TREE if no simplification was possible, otherwise return the
11102 simplified form of the call as a tree.
11104 The simplified form may be a constant or other expression which
11105 computes the same value, but in a more efficient manner (including
11106 calls to other builtin functions).
11108 The call may contain arguments which need to be evaluated, but
11109 which are not useful to determine the result of the call. In
11110 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11111 COMPOUND_EXPR will be an argument which must be evaluated.
11112 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11113 COMPOUND_EXPR in the chain will contain the tree for the simplified
11114 form of the builtin function call. */
11117 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11119 if (!validate_arg (s1, POINTER_TYPE)
11120 || !validate_arg (s2, INTEGER_TYPE))
11126 if (TREE_CODE (s2) != INTEGER_CST)
11129 p1 = c_getstr (s1);
11136 if (target_char_cast (s2, &c))
11139 r = strchr (p1, c);
11142 return build_int_cst (TREE_TYPE (s1), 0);
11144 /* Return an offset into the constant string argument. */
11145 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11146 s1, size_int (r - p1));
11147 return fold_convert_loc (loc, type, tem);
11153 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11154 the call, and TYPE is its return type.
11156 Return NULL_TREE if no simplification was possible, otherwise return the
11157 simplified form of the call as a tree.
11159 The simplified form may be a constant or other expression which
11160 computes the same value, but in a more efficient manner (including
11161 calls to other builtin functions).
11163 The call may contain arguments which need to be evaluated, but
11164 which are not useful to determine the result of the call. In
11165 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11166 COMPOUND_EXPR will be an argument which must be evaluated.
11167 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11168 COMPOUND_EXPR in the chain will contain the tree for the simplified
11169 form of the builtin function call. */
11172 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11174 if (!validate_arg (s1, POINTER_TYPE)
11175 || !validate_arg (s2, INTEGER_TYPE))
11182 if (TREE_CODE (s2) != INTEGER_CST)
11185 p1 = c_getstr (s1);
11192 if (target_char_cast (s2, &c))
11195 r = strrchr (p1, c);
11198 return build_int_cst (TREE_TYPE (s1), 0);
11200 /* Return an offset into the constant string argument. */
11201 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11202 s1, size_int (r - p1));
11203 return fold_convert_loc (loc, type, tem);
11206 if (! integer_zerop (s2))
11209 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11213 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11214 return build_call_expr_loc (loc, fn, 2, s1, s2);
11218 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11219 to the call, and TYPE is its return type.
11221 Return NULL_TREE if no simplification was possible, otherwise return the
11222 simplified form of the call as a tree.
11224 The simplified form may be a constant or other expression which
11225 computes the same value, but in a more efficient manner (including
11226 calls to other builtin functions).
11228 The call may contain arguments which need to be evaluated, but
11229 which are not useful to determine the result of the call. In
11230 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11231 COMPOUND_EXPR will be an argument which must be evaluated.
11232 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11233 COMPOUND_EXPR in the chain will contain the tree for the simplified
11234 form of the builtin function call. */
11237 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11239 if (!validate_arg (s1, POINTER_TYPE)
11240 || !validate_arg (s2, POINTER_TYPE))
11245 const char *p1, *p2;
11247 p2 = c_getstr (s2);
11251 p1 = c_getstr (s1);
11254 const char *r = strpbrk (p1, p2);
11258 return build_int_cst (TREE_TYPE (s1), 0);
11260 /* Return an offset into the constant string argument. */
11261 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11262 s1, size_int (r - p1));
11263 return fold_convert_loc (loc, type, tem);
11267 /* strpbrk(x, "") == NULL.
11268 Evaluate and ignore s1 in case it had side-effects. */
11269 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11272 return NULL_TREE; /* Really call strpbrk. */
11274 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11278 /* New argument list transforming strpbrk(s1, s2) to
11279 strchr(s1, s2[0]). */
11280 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11284 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11287 Return NULL_TREE if no simplification was possible, otherwise return the
11288 simplified form of the call as a tree.
11290 The simplified form may be a constant or other expression which
11291 computes the same value, but in a more efficient manner (including
11292 calls to other builtin functions).
11294 The call may contain arguments which need to be evaluated, but
11295 which are not useful to determine the result of the call. In
11296 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11297 COMPOUND_EXPR will be an argument which must be evaluated.
11298 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11299 COMPOUND_EXPR in the chain will contain the tree for the simplified
11300 form of the builtin function call. */
11303 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11305 if (!validate_arg (dst, POINTER_TYPE)
11306 || !validate_arg (src, POINTER_TYPE))
11310 const char *p = c_getstr (src);
11312 /* If the string length is zero, return the dst parameter. */
11313 if (p && *p == '\0')
11316 if (optimize_insn_for_speed_p ())
11318 /* See if we can store by pieces into (dst + strlen(dst)). */
11320 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11321 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11323 if (!strlen_fn || !strcpy_fn)
11326 /* If we don't have a movstr we don't want to emit an strcpy
11327 call. We have to do that if the length of the source string
11328 isn't computable (in that case we can use memcpy probably
11329 later expanding to a sequence of mov instructions). If we
11330 have movstr instructions we can emit strcpy calls. */
11333 tree len = c_strlen (src, 1);
11334 if (! len || TREE_SIDE_EFFECTS (len))
11338 /* Stabilize the argument list. */
11339 dst = builtin_save_expr (dst);
11341 /* Create strlen (dst). */
11342 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11343 /* Create (dst p+ strlen (dst)). */
11345 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11346 TREE_TYPE (dst), dst, newdst);
11347 newdst = builtin_save_expr (newdst);
11349 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11350 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11356 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11357 arguments to the call.
11359 Return NULL_TREE if no simplification was possible, otherwise return the
11360 simplified form of the call as a tree.
11362 The simplified form may be a constant or other expression which
11363 computes the same value, but in a more efficient manner (including
11364 calls to other builtin functions).
11366 The call may contain arguments which need to be evaluated, but
11367 which are not useful to determine the result of the call. In
11368 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11369 COMPOUND_EXPR will be an argument which must be evaluated.
11370 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11371 COMPOUND_EXPR in the chain will contain the tree for the simplified
11372 form of the builtin function call. */
11375 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11377 if (!validate_arg (dst, POINTER_TYPE)
11378 || !validate_arg (src, POINTER_TYPE)
11379 || !validate_arg (len, INTEGER_TYPE))
11383 const char *p = c_getstr (src);
11385 /* If the requested length is zero, or the src parameter string
11386 length is zero, return the dst parameter. */
11387 if (integer_zerop (len) || (p && *p == '\0'))
11388 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11390 /* If the requested len is greater than or equal to the string
11391 length, call strcat. */
11392 if (TREE_CODE (len) == INTEGER_CST && p
11393 && compare_tree_int (len, strlen (p)) >= 0)
11395 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11397 /* If the replacement _DECL isn't initialized, don't do the
11402 return build_call_expr_loc (loc, fn, 2, dst, src);
11408 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11411 Return NULL_TREE if no simplification was possible, otherwise return the
11412 simplified form of the call as a tree.
11414 The simplified form may be a constant or other expression which
11415 computes the same value, but in a more efficient manner (including
11416 calls to other builtin functions).
11418 The call may contain arguments which need to be evaluated, but
11419 which are not useful to determine the result of the call. In
11420 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11421 COMPOUND_EXPR will be an argument which must be evaluated.
11422 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11423 COMPOUND_EXPR in the chain will contain the tree for the simplified
11424 form of the builtin function call. */
11427 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11429 if (!validate_arg (s1, POINTER_TYPE)
11430 || !validate_arg (s2, POINTER_TYPE))
11434 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11436 /* If both arguments are constants, evaluate at compile-time. */
11439 const size_t r = strspn (p1, p2);
11440 return size_int (r);
11443 /* If either argument is "", return NULL_TREE. */
11444 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11445 /* Evaluate and ignore both arguments in case either one has
11447 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11453 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11456 Return NULL_TREE if no simplification was possible, otherwise return the
11457 simplified form of the call as a tree.
11459 The simplified form may be a constant or other expression which
11460 computes the same value, but in a more efficient manner (including
11461 calls to other builtin functions).
11463 The call may contain arguments which need to be evaluated, but
11464 which are not useful to determine the result of the call. In
11465 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11466 COMPOUND_EXPR will be an argument which must be evaluated.
11467 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11468 COMPOUND_EXPR in the chain will contain the tree for the simplified
11469 form of the builtin function call. */
11472 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11474 if (!validate_arg (s1, POINTER_TYPE)
11475 || !validate_arg (s2, POINTER_TYPE))
11479 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11481 /* If both arguments are constants, evaluate at compile-time. */
11484 const size_t r = strcspn (p1, p2);
11485 return size_int (r);
11488 /* If the first argument is "", return NULL_TREE. */
11489 if (p1 && *p1 == '\0')
11491 /* Evaluate and ignore argument s2 in case it has
11493 return omit_one_operand_loc (loc, size_type_node,
11494 size_zero_node, s2);
11497 /* If the second argument is "", return __builtin_strlen(s1). */
11498 if (p2 && *p2 == '\0')
11500 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11502 /* If the replacement _DECL isn't initialized, don't do the
11507 return build_call_expr_loc (loc, fn, 1, s1);
11513 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11514 to the call. IGNORE is true if the value returned
11515 by the builtin will be ignored. UNLOCKED is true is true if this
11516 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11517 the known length of the string. Return NULL_TREE if no simplification
11521 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11522 bool ignore, bool unlocked, tree len)
11524 /* If we're using an unlocked function, assume the other unlocked
11525 functions exist explicitly. */
11526 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11527 : implicit_built_in_decls[BUILT_IN_FPUTC];
11528 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11529 : implicit_built_in_decls[BUILT_IN_FWRITE];
11531 /* If the return value is used, don't do the transformation. */
11535 /* Verify the arguments in the original call. */
11536 if (!validate_arg (arg0, POINTER_TYPE)
11537 || !validate_arg (arg1, POINTER_TYPE))
11541 len = c_strlen (arg0, 0);
11543 /* Get the length of the string passed to fputs. If the length
11544 can't be determined, punt. */
11546 || TREE_CODE (len) != INTEGER_CST)
11549 switch (compare_tree_int (len, 1))
11551 case -1: /* length is 0, delete the call entirely . */
11552 return omit_one_operand_loc (loc, integer_type_node,
11553 integer_zero_node, arg1);;
11555 case 0: /* length is 1, call fputc. */
11557 const char *p = c_getstr (arg0);
11562 return build_call_expr_loc (loc, fn_fputc, 2,
11563 build_int_cst (NULL_TREE, p[0]), arg1);
11569 case 1: /* length is greater than 1, call fwrite. */
11571 /* If optimizing for size keep fputs. */
11572 if (optimize_function_for_size_p (cfun))
11574 /* New argument list transforming fputs(string, stream) to
11575 fwrite(string, 1, len, stream). */
11577 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11578 size_one_node, len, arg1);
11583 gcc_unreachable ();
11588 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11589 produced. False otherwise. This is done so that we don't output the error
11590 or warning twice or three times. */
11593 fold_builtin_next_arg (tree exp, bool va_start_p)
11595 tree fntype = TREE_TYPE (current_function_decl);
11596 int nargs = call_expr_nargs (exp);
11599 if (!stdarg_p (fntype))
11601 error ("%<va_start%> used in function with fixed args");
11607 if (va_start_p && (nargs != 2))
11609 error ("wrong number of arguments to function %<va_start%>");
11612 arg = CALL_EXPR_ARG (exp, 1);
11614 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11615 when we checked the arguments and if needed issued a warning. */
11620 /* Evidently an out of date version of <stdarg.h>; can't validate
11621 va_start's second argument, but can still work as intended. */
11622 warning (0, "%<__builtin_next_arg%> called without an argument");
11625 else if (nargs > 1)
11627 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11630 arg = CALL_EXPR_ARG (exp, 0);
11633 if (TREE_CODE (arg) == SSA_NAME)
11634 arg = SSA_NAME_VAR (arg);
11636 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11637 or __builtin_next_arg (0) the first time we see it, after checking
11638 the arguments and if needed issuing a warning. */
11639 if (!integer_zerop (arg))
11641 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11643 /* Strip off all nops for the sake of the comparison. This
11644 is not quite the same as STRIP_NOPS. It does more.
11645 We must also strip off INDIRECT_EXPR for C++ reference
11647 while (CONVERT_EXPR_P (arg)
11648 || TREE_CODE (arg) == INDIRECT_REF)
11649 arg = TREE_OPERAND (arg, 0);
11650 if (arg != last_parm)
11652 /* FIXME: Sometimes with the tree optimizers we can get the
11653 not the last argument even though the user used the last
11654 argument. We just warn and set the arg to be the last
11655 argument so that we will get wrong-code because of
11657 warning (0, "second parameter of %<va_start%> not last named argument");
11660 /* Undefined by C99 7.15.1.4p4 (va_start):
11661 "If the parameter parmN is declared with the register storage
11662 class, with a function or array type, or with a type that is
11663 not compatible with the type that results after application of
11664 the default argument promotions, the behavior is undefined."
11666 else if (DECL_REGISTER (arg))
11667 warning (0, "undefined behaviour when second parameter of "
11668 "%<va_start%> is declared with %<register%> storage");
11670 /* We want to verify the second parameter just once before the tree
11671 optimizers are run and then avoid keeping it in the tree,
11672 as otherwise we could warn even for correct code like:
11673 void foo (int i, ...)
11674 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11676 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11678 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11684 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11685 ORIG may be null if this is a 2-argument call. We don't attempt to
11686 simplify calls with more than 3 arguments.
11688 Return NULL_TREE if no simplification was possible, otherwise return the
11689 simplified form of the call as a tree. If IGNORED is true, it means that
11690 the caller does not use the returned value of the function. */
11693 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11694 tree orig, int ignored)
11697 const char *fmt_str = NULL;
11699 /* Verify the required arguments in the original call. We deal with two
11700 types of sprintf() calls: 'sprintf (str, fmt)' and
11701 'sprintf (dest, "%s", orig)'. */
11702 if (!validate_arg (dest, POINTER_TYPE)
11703 || !validate_arg (fmt, POINTER_TYPE))
11705 if (orig && !validate_arg (orig, POINTER_TYPE))
11708 /* Check whether the format is a literal string constant. */
11709 fmt_str = c_getstr (fmt);
11710 if (fmt_str == NULL)
11714 retval = NULL_TREE;
11716 if (!init_target_chars ())
11719 /* If the format doesn't contain % args or %%, use strcpy. */
11720 if (strchr (fmt_str, target_percent) == NULL)
11722 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11727 /* Don't optimize sprintf (buf, "abc", ptr++). */
11731 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11732 'format' is known to contain no % formats. */
11733 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11735 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11738 /* If the format is "%s", use strcpy if the result isn't used. */
11739 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11742 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11747 /* Don't crash on sprintf (str1, "%s"). */
11751 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11754 retval = c_strlen (orig, 1);
11755 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11758 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11761 if (call && retval)
11763 retval = fold_convert_loc
11764 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11766 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11772 /* Expand a call EXP to __builtin_object_size. */
11775 expand_builtin_object_size (tree exp)
11778 int object_size_type;
11779 tree fndecl = get_callee_fndecl (exp);
11781 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11783 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11785 expand_builtin_trap ();
11789 ost = CALL_EXPR_ARG (exp, 1);
11792 if (TREE_CODE (ost) != INTEGER_CST
11793 || tree_int_cst_sgn (ost) < 0
11794 || compare_tree_int (ost, 3) > 0)
11796 error ("%Klast argument of %D is not integer constant between 0 and 3",
11798 expand_builtin_trap ();
11802 object_size_type = tree_low_cst (ost, 0);
11804 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11807 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11808 FCODE is the BUILT_IN_* to use.
11809 Return NULL_RTX if we failed; the caller should emit a normal call,
11810 otherwise try to get the result in TARGET, if convenient (and in
11811 mode MODE if that's convenient). */
11814 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11815 enum built_in_function fcode)
11817 tree dest, src, len, size;
11819 if (!validate_arglist (exp,
11821 fcode == BUILT_IN_MEMSET_CHK
11822 ? INTEGER_TYPE : POINTER_TYPE,
11823 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11826 dest = CALL_EXPR_ARG (exp, 0);
11827 src = CALL_EXPR_ARG (exp, 1);
11828 len = CALL_EXPR_ARG (exp, 2);
11829 size = CALL_EXPR_ARG (exp, 3);
11831 if (! host_integerp (size, 1))
11834 if (host_integerp (len, 1) || integer_all_onesp (size))
11838 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11840 warning_at (tree_nonartificial_location (exp),
11841 0, "%Kcall to %D will always overflow destination buffer",
11842 exp, get_callee_fndecl (exp));
11847 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11848 mem{cpy,pcpy,move,set} is available. */
11851 case BUILT_IN_MEMCPY_CHK:
11852 fn = built_in_decls[BUILT_IN_MEMCPY];
11854 case BUILT_IN_MEMPCPY_CHK:
11855 fn = built_in_decls[BUILT_IN_MEMPCPY];
11857 case BUILT_IN_MEMMOVE_CHK:
11858 fn = built_in_decls[BUILT_IN_MEMMOVE];
11860 case BUILT_IN_MEMSET_CHK:
11861 fn = built_in_decls[BUILT_IN_MEMSET];
11870 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11871 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11872 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11873 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11875 else if (fcode == BUILT_IN_MEMSET_CHK)
11879 unsigned int dest_align
11880 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11882 /* If DEST is not a pointer type, call the normal function. */
11883 if (dest_align == 0)
11886 /* If SRC and DEST are the same (and not volatile), do nothing. */
11887 if (operand_equal_p (src, dest, 0))
11891 if (fcode != BUILT_IN_MEMPCPY_CHK)
11893 /* Evaluate and ignore LEN in case it has side-effects. */
11894 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11895 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11898 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11899 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11902 /* __memmove_chk special case. */
11903 if (fcode == BUILT_IN_MEMMOVE_CHK)
11905 unsigned int src_align
11906 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11908 if (src_align == 0)
11911 /* If src is categorized for a readonly section we can use
11912 normal __memcpy_chk. */
11913 if (readonly_data_expr (src))
11915 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11918 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11919 dest, src, len, size);
11920 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11921 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11922 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11929 /* Emit warning if a buffer overflow is detected at compile time. */
11932 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11936 location_t loc = tree_nonartificial_location (exp);
11940 case BUILT_IN_STRCPY_CHK:
11941 case BUILT_IN_STPCPY_CHK:
11942 /* For __strcat_chk the warning will be emitted only if overflowing
11943 by at least strlen (dest) + 1 bytes. */
11944 case BUILT_IN_STRCAT_CHK:
11945 len = CALL_EXPR_ARG (exp, 1);
11946 size = CALL_EXPR_ARG (exp, 2);
11949 case BUILT_IN_STRNCAT_CHK:
11950 case BUILT_IN_STRNCPY_CHK:
11951 len = CALL_EXPR_ARG (exp, 2);
11952 size = CALL_EXPR_ARG (exp, 3);
11954 case BUILT_IN_SNPRINTF_CHK:
11955 case BUILT_IN_VSNPRINTF_CHK:
11956 len = CALL_EXPR_ARG (exp, 1);
11957 size = CALL_EXPR_ARG (exp, 3);
11960 gcc_unreachable ();
11966 if (! host_integerp (size, 1) || integer_all_onesp (size))
11971 len = c_strlen (len, 1);
11972 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11975 else if (fcode == BUILT_IN_STRNCAT_CHK)
11977 tree src = CALL_EXPR_ARG (exp, 1);
11978 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11980 src = c_strlen (src, 1);
11981 if (! src || ! host_integerp (src, 1))
11983 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11984 exp, get_callee_fndecl (exp));
11987 else if (tree_int_cst_lt (src, size))
11990 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11993 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11994 exp, get_callee_fndecl (exp));
11997 /* Emit warning if a buffer overflow is detected at compile time
11998 in __sprintf_chk/__vsprintf_chk calls. */
12001 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12003 tree size, len, fmt;
12004 const char *fmt_str;
12005 int nargs = call_expr_nargs (exp);
12007 /* Verify the required arguments in the original call. */
12011 size = CALL_EXPR_ARG (exp, 2);
12012 fmt = CALL_EXPR_ARG (exp, 3);
12014 if (! host_integerp (size, 1) || integer_all_onesp (size))
12017 /* Check whether the format is a literal string constant. */
12018 fmt_str = c_getstr (fmt);
12019 if (fmt_str == NULL)
12022 if (!init_target_chars ())
12025 /* If the format doesn't contain % args or %%, we know its size. */
12026 if (strchr (fmt_str, target_percent) == 0)
12027 len = build_int_cstu (size_type_node, strlen (fmt_str));
12028 /* If the format is "%s" and first ... argument is a string literal,
12030 else if (fcode == BUILT_IN_SPRINTF_CHK
12031 && strcmp (fmt_str, target_percent_s) == 0)
12037 arg = CALL_EXPR_ARG (exp, 4);
12038 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12041 len = c_strlen (arg, 1);
12042 if (!len || ! host_integerp (len, 1))
12048 if (! tree_int_cst_lt (len, size))
12049 warning_at (tree_nonartificial_location (exp),
12050 0, "%Kcall to %D will always overflow destination buffer",
12051 exp, get_callee_fndecl (exp));
12054 /* Emit warning if a free is called with address of a variable. */
12057 maybe_emit_free_warning (tree exp)
12059 tree arg = CALL_EXPR_ARG (exp, 0);
12062 if (TREE_CODE (arg) != ADDR_EXPR)
12065 arg = get_base_address (TREE_OPERAND (arg, 0));
12066 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12069 if (SSA_VAR_P (arg))
12070 warning_at (tree_nonartificial_location (exp),
12071 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12073 warning_at (tree_nonartificial_location (exp),
12074 0, "%Kattempt to free a non-heap object", exp);
12077 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12081 fold_builtin_object_size (tree ptr, tree ost)
12083 unsigned HOST_WIDE_INT bytes;
12084 int object_size_type;
12086 if (!validate_arg (ptr, POINTER_TYPE)
12087 || !validate_arg (ost, INTEGER_TYPE))
12092 if (TREE_CODE (ost) != INTEGER_CST
12093 || tree_int_cst_sgn (ost) < 0
12094 || compare_tree_int (ost, 3) > 0)
12097 object_size_type = tree_low_cst (ost, 0);
12099 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12100 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12101 and (size_t) 0 for types 2 and 3. */
12102 if (TREE_SIDE_EFFECTS (ptr))
12103 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12105 if (TREE_CODE (ptr) == ADDR_EXPR)
12107 bytes = compute_builtin_object_size (ptr, object_size_type);
12108 if (double_int_fits_to_tree_p (size_type_node,
12109 uhwi_to_double_int (bytes)))
12110 return build_int_cstu (size_type_node, bytes);
12112 else if (TREE_CODE (ptr) == SSA_NAME)
12114 /* If object size is not known yet, delay folding until
12115 later. Maybe subsequent passes will help determining
12117 bytes = compute_builtin_object_size (ptr, object_size_type);
12118 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12119 && double_int_fits_to_tree_p (size_type_node,
12120 uhwi_to_double_int (bytes)))
12121 return build_int_cstu (size_type_node, bytes);
12127 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12128 DEST, SRC, LEN, and SIZE are the arguments to the call.
12129 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12130 code of the builtin. If MAXLEN is not NULL, it is maximum length
12131 passed as third argument. */
12134 fold_builtin_memory_chk (location_t loc, tree fndecl,
12135 tree dest, tree src, tree len, tree size,
12136 tree maxlen, bool ignore,
12137 enum built_in_function fcode)
12141 if (!validate_arg (dest, POINTER_TYPE)
12142 || !validate_arg (src,
12143 (fcode == BUILT_IN_MEMSET_CHK
12144 ? INTEGER_TYPE : POINTER_TYPE))
12145 || !validate_arg (len, INTEGER_TYPE)
12146 || !validate_arg (size, INTEGER_TYPE))
12149 /* If SRC and DEST are the same (and not volatile), return DEST
12150 (resp. DEST+LEN for __mempcpy_chk). */
12151 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12153 if (fcode != BUILT_IN_MEMPCPY_CHK)
12154 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12158 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12160 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12164 if (! host_integerp (size, 1))
12167 if (! integer_all_onesp (size))
12169 if (! host_integerp (len, 1))
12171 /* If LEN is not constant, try MAXLEN too.
12172 For MAXLEN only allow optimizing into non-_ocs function
12173 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12174 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12176 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12178 /* (void) __mempcpy_chk () can be optimized into
12179 (void) __memcpy_chk (). */
12180 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12184 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12192 if (tree_int_cst_lt (size, maxlen))
12197 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12198 mem{cpy,pcpy,move,set} is available. */
12201 case BUILT_IN_MEMCPY_CHK:
12202 fn = built_in_decls[BUILT_IN_MEMCPY];
12204 case BUILT_IN_MEMPCPY_CHK:
12205 fn = built_in_decls[BUILT_IN_MEMPCPY];
12207 case BUILT_IN_MEMMOVE_CHK:
12208 fn = built_in_decls[BUILT_IN_MEMMOVE];
12210 case BUILT_IN_MEMSET_CHK:
12211 fn = built_in_decls[BUILT_IN_MEMSET];
12220 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12223 /* Fold a call to the __st[rp]cpy_chk builtin.
12224 DEST, SRC, and SIZE are the arguments to the call.
12225 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12226 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12227 strings passed as second argument. */
12230 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12231 tree src, tree size,
12232 tree maxlen, bool ignore,
12233 enum built_in_function fcode)
12237 if (!validate_arg (dest, POINTER_TYPE)
12238 || !validate_arg (src, POINTER_TYPE)
12239 || !validate_arg (size, INTEGER_TYPE))
12242 /* If SRC and DEST are the same (and not volatile), return DEST. */
12243 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12244 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12246 if (! host_integerp (size, 1))
12249 if (! integer_all_onesp (size))
12251 len = c_strlen (src, 1);
12252 if (! len || ! host_integerp (len, 1))
12254 /* If LEN is not constant, try MAXLEN too.
12255 For MAXLEN only allow optimizing into non-_ocs function
12256 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12257 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12259 if (fcode == BUILT_IN_STPCPY_CHK)
12264 /* If return value of __stpcpy_chk is ignored,
12265 optimize into __strcpy_chk. */
12266 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12270 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12273 if (! len || TREE_SIDE_EFFECTS (len))
12276 /* If c_strlen returned something, but not a constant,
12277 transform __strcpy_chk into __memcpy_chk. */
12278 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12282 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12283 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12284 build_call_expr_loc (loc, fn, 4,
12285 dest, src, len, size));
12291 if (! tree_int_cst_lt (maxlen, size))
12295 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12296 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12297 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12301 return build_call_expr_loc (loc, fn, 2, dest, src);
12304 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12305 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12306 length passed as third argument. */
12309 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12310 tree len, tree size, tree maxlen)
12314 if (!validate_arg (dest, POINTER_TYPE)
12315 || !validate_arg (src, POINTER_TYPE)
12316 || !validate_arg (len, INTEGER_TYPE)
12317 || !validate_arg (size, INTEGER_TYPE))
12320 if (! host_integerp (size, 1))
12323 if (! integer_all_onesp (size))
12325 if (! host_integerp (len, 1))
12327 /* If LEN is not constant, try MAXLEN too.
12328 For MAXLEN only allow optimizing into non-_ocs function
12329 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12330 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12336 if (tree_int_cst_lt (size, maxlen))
12340 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12341 fn = built_in_decls[BUILT_IN_STRNCPY];
12345 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12348 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12349 are the arguments to the call. */
12352 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12353 tree src, tree size)
12358 if (!validate_arg (dest, POINTER_TYPE)
12359 || !validate_arg (src, POINTER_TYPE)
12360 || !validate_arg (size, INTEGER_TYPE))
12363 p = c_getstr (src);
12364 /* If the SRC parameter is "", return DEST. */
12365 if (p && *p == '\0')
12366 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12368 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12371 /* If __builtin_strcat_chk is used, assume strcat is available. */
12372 fn = built_in_decls[BUILT_IN_STRCAT];
12376 return build_call_expr_loc (loc, fn, 2, dest, src);
12379 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12383 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12384 tree dest, tree src, tree len, tree size)
12389 if (!validate_arg (dest, POINTER_TYPE)
12390 || !validate_arg (src, POINTER_TYPE)
12391 || !validate_arg (size, INTEGER_TYPE)
12392 || !validate_arg (size, INTEGER_TYPE))
12395 p = c_getstr (src);
12396 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12397 if (p && *p == '\0')
12398 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12399 else if (integer_zerop (len))
12400 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12402 if (! host_integerp (size, 1))
12405 if (! integer_all_onesp (size))
12407 tree src_len = c_strlen (src, 1);
12409 && host_integerp (src_len, 1)
12410 && host_integerp (len, 1)
12411 && ! tree_int_cst_lt (len, src_len))
12413 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12414 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12418 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12423 /* If __builtin_strncat_chk is used, assume strncat is available. */
12424 fn = built_in_decls[BUILT_IN_STRNCAT];
12428 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12431 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12432 a normal call should be emitted rather than expanding the function
12433 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12436 fold_builtin_sprintf_chk (location_t loc, tree exp,
12437 enum built_in_function fcode)
12439 tree dest, size, len, fn, fmt, flag;
12440 const char *fmt_str;
12441 int nargs = call_expr_nargs (exp);
12443 /* Verify the required arguments in the original call. */
12446 dest = CALL_EXPR_ARG (exp, 0);
12447 if (!validate_arg (dest, POINTER_TYPE))
12449 flag = CALL_EXPR_ARG (exp, 1);
12450 if (!validate_arg (flag, INTEGER_TYPE))
12452 size = CALL_EXPR_ARG (exp, 2);
12453 if (!validate_arg (size, INTEGER_TYPE))
12455 fmt = CALL_EXPR_ARG (exp, 3);
12456 if (!validate_arg (fmt, POINTER_TYPE))
12459 if (! host_integerp (size, 1))
12464 if (!init_target_chars ())
12467 /* Check whether the format is a literal string constant. */
12468 fmt_str = c_getstr (fmt);
12469 if (fmt_str != NULL)
12471 /* If the format doesn't contain % args or %%, we know the size. */
12472 if (strchr (fmt_str, target_percent) == 0)
12474 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12475 len = build_int_cstu (size_type_node, strlen (fmt_str));
12477 /* If the format is "%s" and first ... argument is a string literal,
12478 we know the size too. */
12479 else if (fcode == BUILT_IN_SPRINTF_CHK
12480 && strcmp (fmt_str, target_percent_s) == 0)
12486 arg = CALL_EXPR_ARG (exp, 4);
12487 if (validate_arg (arg, POINTER_TYPE))
12489 len = c_strlen (arg, 1);
12490 if (! len || ! host_integerp (len, 1))
12497 if (! integer_all_onesp (size))
12499 if (! len || ! tree_int_cst_lt (len, size))
12503 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12504 or if format doesn't contain % chars or is "%s". */
12505 if (! integer_zerop (flag))
12507 if (fmt_str == NULL)
12509 if (strchr (fmt_str, target_percent) != NULL
12510 && strcmp (fmt_str, target_percent_s))
12514 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12515 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12516 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12520 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12523 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12524 a normal call should be emitted rather than expanding the function
12525 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12526 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12527 passed as second argument. */
12530 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12531 enum built_in_function fcode)
12533 tree dest, size, len, fn, fmt, flag;
12534 const char *fmt_str;
12536 /* Verify the required arguments in the original call. */
12537 if (call_expr_nargs (exp) < 5)
12539 dest = CALL_EXPR_ARG (exp, 0);
12540 if (!validate_arg (dest, POINTER_TYPE))
12542 len = CALL_EXPR_ARG (exp, 1);
12543 if (!validate_arg (len, INTEGER_TYPE))
12545 flag = CALL_EXPR_ARG (exp, 2);
12546 if (!validate_arg (flag, INTEGER_TYPE))
12548 size = CALL_EXPR_ARG (exp, 3);
12549 if (!validate_arg (size, INTEGER_TYPE))
12551 fmt = CALL_EXPR_ARG (exp, 4);
12552 if (!validate_arg (fmt, POINTER_TYPE))
12555 if (! host_integerp (size, 1))
12558 if (! integer_all_onesp (size))
12560 if (! host_integerp (len, 1))
12562 /* If LEN is not constant, try MAXLEN too.
12563 For MAXLEN only allow optimizing into non-_ocs function
12564 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12565 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12571 if (tree_int_cst_lt (size, maxlen))
12575 if (!init_target_chars ())
12578 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12579 or if format doesn't contain % chars or is "%s". */
12580 if (! integer_zerop (flag))
12582 fmt_str = c_getstr (fmt);
12583 if (fmt_str == NULL)
12585 if (strchr (fmt_str, target_percent) != NULL
12586 && strcmp (fmt_str, target_percent_s))
12590 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12592 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12593 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12597 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12600 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12601 FMT and ARG are the arguments to the call; we don't fold cases with
12602 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12604 Return NULL_TREE if no simplification was possible, otherwise return the
12605 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12606 code of the function to be simplified. */
12609 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12610 tree arg, bool ignore,
12611 enum built_in_function fcode)
12613 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12614 const char *fmt_str = NULL;
12616 /* If the return value is used, don't do the transformation. */
12620 /* Verify the required arguments in the original call. */
12621 if (!validate_arg (fmt, POINTER_TYPE))
12624 /* Check whether the format is a literal string constant. */
12625 fmt_str = c_getstr (fmt);
12626 if (fmt_str == NULL)
12629 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12631 /* If we're using an unlocked function, assume the other
12632 unlocked functions exist explicitly. */
12633 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12634 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12638 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12639 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12642 if (!init_target_chars ())
12645 if (strcmp (fmt_str, target_percent_s) == 0
12646 || strchr (fmt_str, target_percent) == NULL)
12650 if (strcmp (fmt_str, target_percent_s) == 0)
12652 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12655 if (!arg || !validate_arg (arg, POINTER_TYPE))
12658 str = c_getstr (arg);
12664 /* The format specifier doesn't contain any '%' characters. */
12665 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12671 /* If the string was "", printf does nothing. */
12672 if (str[0] == '\0')
12673 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12675 /* If the string has length of 1, call putchar. */
12676 if (str[1] == '\0')
12678 /* Given printf("c"), (where c is any one character,)
12679 convert "c"[0] to an int and pass that to the replacement
12681 newarg = build_int_cst (NULL_TREE, str[0]);
12683 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12687 /* If the string was "string\n", call puts("string"). */
12688 size_t len = strlen (str);
12689 if ((unsigned char)str[len - 1] == target_newline)
12691 /* Create a NUL-terminated string that's one char shorter
12692 than the original, stripping off the trailing '\n'. */
12693 char *newstr = XALLOCAVEC (char, len);
12694 memcpy (newstr, str, len - 1);
12695 newstr[len - 1] = 0;
12697 newarg = build_string_literal (len, newstr);
12699 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12702 /* We'd like to arrange to call fputs(string,stdout) here,
12703 but we need stdout and don't have a way to get it yet. */
12708 /* The other optimizations can be done only on the non-va_list variants. */
12709 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12712 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12713 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12715 if (!arg || !validate_arg (arg, POINTER_TYPE))
12718 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12721 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12722 else if (strcmp (fmt_str, target_percent_c) == 0)
12724 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12727 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12733 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12736 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12737 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12738 more than 3 arguments, and ARG may be null in the 2-argument case.
12740 Return NULL_TREE if no simplification was possible, otherwise return the
12741 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12742 code of the function to be simplified. */
12745 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12746 tree fmt, tree arg, bool ignore,
12747 enum built_in_function fcode)
12749 tree fn_fputc, fn_fputs, call = NULL_TREE;
12750 const char *fmt_str = NULL;
12752 /* If the return value is used, don't do the transformation. */
12756 /* Verify the required arguments in the original call. */
12757 if (!validate_arg (fp, POINTER_TYPE))
12759 if (!validate_arg (fmt, POINTER_TYPE))
12762 /* Check whether the format is a literal string constant. */
12763 fmt_str = c_getstr (fmt);
12764 if (fmt_str == NULL)
12767 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12769 /* If we're using an unlocked function, assume the other
12770 unlocked functions exist explicitly. */
12771 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12772 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12776 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12777 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12780 if (!init_target_chars ())
12783 /* If the format doesn't contain % args or %%, use strcpy. */
12784 if (strchr (fmt_str, target_percent) == NULL)
12786 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12790 /* If the format specifier was "", fprintf does nothing. */
12791 if (fmt_str[0] == '\0')
12793 /* If FP has side-effects, just wait until gimplification is
12795 if (TREE_SIDE_EFFECTS (fp))
12798 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12801 /* When "string" doesn't contain %, replace all cases of
12802 fprintf (fp, string) with fputs (string, fp). The fputs
12803 builtin will take care of special cases like length == 1. */
12805 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12808 /* The other optimizations can be done only on the non-va_list variants. */
12809 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12812 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12813 else if (strcmp (fmt_str, target_percent_s) == 0)
12815 if (!arg || !validate_arg (arg, POINTER_TYPE))
12818 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12821 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12822 else if (strcmp (fmt_str, target_percent_c) == 0)
12824 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12827 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12832 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12835 /* Initialize format string characters in the target charset. */
12838 init_target_chars (void)
12843 target_newline = lang_hooks.to_target_charset ('\n');
12844 target_percent = lang_hooks.to_target_charset ('%');
12845 target_c = lang_hooks.to_target_charset ('c');
12846 target_s = lang_hooks.to_target_charset ('s');
12847 if (target_newline == 0 || target_percent == 0 || target_c == 0
12851 target_percent_c[0] = target_percent;
12852 target_percent_c[1] = target_c;
12853 target_percent_c[2] = '\0';
12855 target_percent_s[0] = target_percent;
12856 target_percent_s[1] = target_s;
12857 target_percent_s[2] = '\0';
12859 target_percent_s_newline[0] = target_percent;
12860 target_percent_s_newline[1] = target_s;
12861 target_percent_s_newline[2] = target_newline;
12862 target_percent_s_newline[3] = '\0';
12869 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12870 and no overflow/underflow occurred. INEXACT is true if M was not
12871 exactly calculated. TYPE is the tree type for the result. This
12872 function assumes that you cleared the MPFR flags and then
12873 calculated M to see if anything subsequently set a flag prior to
12874 entering this function. Return NULL_TREE if any checks fail. */
12877 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12879 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12880 overflow/underflow occurred. If -frounding-math, proceed iff the
12881 result of calling FUNC was exact. */
12882 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12883 && (!flag_rounding_math || !inexact))
12885 REAL_VALUE_TYPE rr;
12887 real_from_mpfr (&rr, m, type, GMP_RNDN);
12888 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12889 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12890 but the mpft_t is not, then we underflowed in the
12892 if (real_isfinite (&rr)
12893 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12895 REAL_VALUE_TYPE rmode;
12897 real_convert (&rmode, TYPE_MODE (type), &rr);
12898 /* Proceed iff the specified mode can hold the value. */
12899 if (real_identical (&rmode, &rr))
12900 return build_real (type, rmode);
12906 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12907 number and no overflow/underflow occurred. INEXACT is true if M
12908 was not exactly calculated. TYPE is the tree type for the result.
12909 This function assumes that you cleared the MPFR flags and then
12910 calculated M to see if anything subsequently set a flag prior to
12911 entering this function. Return NULL_TREE if any checks fail, if
12912 FORCE_CONVERT is true, then bypass the checks. */
12915 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12917 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12918 overflow/underflow occurred. If -frounding-math, proceed iff the
12919 result of calling FUNC was exact. */
12921 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12922 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12923 && (!flag_rounding_math || !inexact)))
12925 REAL_VALUE_TYPE re, im;
12927 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12928 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12929 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12930 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12931 but the mpft_t is not, then we underflowed in the
12934 || (real_isfinite (&re) && real_isfinite (&im)
12935 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12936 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12938 REAL_VALUE_TYPE re_mode, im_mode;
12940 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12941 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12942 /* Proceed iff the specified mode can hold the value. */
12944 || (real_identical (&re_mode, &re)
12945 && real_identical (&im_mode, &im)))
12946 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12947 build_real (TREE_TYPE (type), im_mode));
12953 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12954 FUNC on it and return the resulting value as a tree with type TYPE.
12955 If MIN and/or MAX are not NULL, then the supplied ARG must be
12956 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12957 acceptable values, otherwise they are not. The mpfr precision is
12958 set to the precision of TYPE. We assume that function FUNC returns
12959 zero if the result could be calculated exactly within the requested
12963 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12964 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12967 tree result = NULL_TREE;
12971 /* To proceed, MPFR must exactly represent the target floating point
12972 format, which only happens when the target base equals two. */
12973 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12974 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12976 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12978 if (real_isfinite (ra)
12979 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12980 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12982 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12983 const int prec = fmt->p;
12984 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12988 mpfr_init2 (m, prec);
12989 mpfr_from_real (m, ra, GMP_RNDN);
12990 mpfr_clear_flags ();
12991 inexact = func (m, m, rnd);
12992 result = do_mpfr_ckconv (m, type, inexact);
13000 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13001 FUNC on it and return the resulting value as a tree with type TYPE.
13002 The mpfr precision is set to the precision of TYPE. We assume that
13003 function FUNC returns zero if the result could be calculated
13004 exactly within the requested precision. */
13007 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13008 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13010 tree result = NULL_TREE;
13015 /* To proceed, MPFR must exactly represent the target floating point
13016 format, which only happens when the target base equals two. */
13017 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13018 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13019 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13021 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13022 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13024 if (real_isfinite (ra1) && real_isfinite (ra2))
13026 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13027 const int prec = fmt->p;
13028 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13032 mpfr_inits2 (prec, m1, m2, NULL);
13033 mpfr_from_real (m1, ra1, GMP_RNDN);
13034 mpfr_from_real (m2, ra2, GMP_RNDN);
13035 mpfr_clear_flags ();
13036 inexact = func (m1, m1, m2, rnd);
13037 result = do_mpfr_ckconv (m1, type, inexact);
13038 mpfr_clears (m1, m2, NULL);
13045 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13046 FUNC on it and return the resulting value as a tree with type TYPE.
13047 The mpfr precision is set to the precision of TYPE. We assume that
13048 function FUNC returns zero if the result could be calculated
13049 exactly within the requested precision. */
13052 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13053 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13055 tree result = NULL_TREE;
13061 /* To proceed, MPFR must exactly represent the target floating point
13062 format, which only happens when the target base equals two. */
13063 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13064 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13065 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13066 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13068 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13069 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13070 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13072 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13074 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13075 const int prec = fmt->p;
13076 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13080 mpfr_inits2 (prec, m1, m2, m3, NULL);
13081 mpfr_from_real (m1, ra1, GMP_RNDN);
13082 mpfr_from_real (m2, ra2, GMP_RNDN);
13083 mpfr_from_real (m3, ra3, GMP_RNDN);
13084 mpfr_clear_flags ();
13085 inexact = func (m1, m1, m2, m3, rnd);
13086 result = do_mpfr_ckconv (m1, type, inexact);
13087 mpfr_clears (m1, m2, m3, NULL);
13094 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13095 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13096 If ARG_SINP and ARG_COSP are NULL then the result is returned
13097 as a complex value.
13098 The type is taken from the type of ARG and is used for setting the
13099 precision of the calculation and results. */
13102 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13104 tree const type = TREE_TYPE (arg);
13105 tree result = NULL_TREE;
13109 /* To proceed, MPFR must exactly represent the target floating point
13110 format, which only happens when the target base equals two. */
13111 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13112 && TREE_CODE (arg) == REAL_CST
13113 && !TREE_OVERFLOW (arg))
13115 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13117 if (real_isfinite (ra))
13119 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13120 const int prec = fmt->p;
13121 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13122 tree result_s, result_c;
13126 mpfr_inits2 (prec, m, ms, mc, NULL);
13127 mpfr_from_real (m, ra, GMP_RNDN);
13128 mpfr_clear_flags ();
13129 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13130 result_s = do_mpfr_ckconv (ms, type, inexact);
13131 result_c = do_mpfr_ckconv (mc, type, inexact);
13132 mpfr_clears (m, ms, mc, NULL);
13133 if (result_s && result_c)
13135 /* If we are to return in a complex value do so. */
13136 if (!arg_sinp && !arg_cosp)
13137 return build_complex (build_complex_type (type),
13138 result_c, result_s);
13140 /* Dereference the sin/cos pointer arguments. */
13141 arg_sinp = build_fold_indirect_ref (arg_sinp);
13142 arg_cosp = build_fold_indirect_ref (arg_cosp);
13143 /* Proceed if valid pointer type were passed in. */
13144 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13145 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13147 /* Set the values. */
13148 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13150 TREE_SIDE_EFFECTS (result_s) = 1;
13151 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13153 TREE_SIDE_EFFECTS (result_c) = 1;
13154 /* Combine the assignments into a compound expr. */
13155 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13156 result_s, result_c));
13164 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13165 two-argument mpfr order N Bessel function FUNC on them and return
13166 the resulting value as a tree with type TYPE. The mpfr precision
13167 is set to the precision of TYPE. We assume that function FUNC
13168 returns zero if the result could be calculated exactly within the
13169 requested precision. */
13171 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13172 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13173 const REAL_VALUE_TYPE *min, bool inclusive)
13175 tree result = NULL_TREE;
13180 /* To proceed, MPFR must exactly represent the target floating point
13181 format, which only happens when the target base equals two. */
13182 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13183 && host_integerp (arg1, 0)
13184 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13186 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13187 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13190 && real_isfinite (ra)
13191 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13193 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13194 const int prec = fmt->p;
13195 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13199 mpfr_init2 (m, prec);
13200 mpfr_from_real (m, ra, GMP_RNDN);
13201 mpfr_clear_flags ();
13202 inexact = func (m, n, m, rnd);
13203 result = do_mpfr_ckconv (m, type, inexact);
13211 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13212 the pointer *(ARG_QUO) and return the result. The type is taken
13213 from the type of ARG0 and is used for setting the precision of the
13214 calculation and results. */
13217 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13219 tree const type = TREE_TYPE (arg0);
13220 tree result = NULL_TREE;
13225 /* To proceed, MPFR must exactly represent the target floating point
13226 format, which only happens when the target base equals two. */
13227 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13228 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13229 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13231 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13232 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13234 if (real_isfinite (ra0) && real_isfinite (ra1))
13236 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13237 const int prec = fmt->p;
13238 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13243 mpfr_inits2 (prec, m0, m1, NULL);
13244 mpfr_from_real (m0, ra0, GMP_RNDN);
13245 mpfr_from_real (m1, ra1, GMP_RNDN);
13246 mpfr_clear_flags ();
13247 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13248 /* Remquo is independent of the rounding mode, so pass
13249 inexact=0 to do_mpfr_ckconv(). */
13250 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13251 mpfr_clears (m0, m1, NULL);
13254 /* MPFR calculates quo in the host's long so it may
13255 return more bits in quo than the target int can hold
13256 if sizeof(host long) > sizeof(target int). This can
13257 happen even for native compilers in LP64 mode. In
13258 these cases, modulo the quo value with the largest
13259 number that the target int can hold while leaving one
13260 bit for the sign. */
13261 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13262 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13264 /* Dereference the quo pointer argument. */
13265 arg_quo = build_fold_indirect_ref (arg_quo);
13266 /* Proceed iff a valid pointer type was passed in. */
13267 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13269 /* Set the value. */
13270 tree result_quo = fold_build2 (MODIFY_EXPR,
13271 TREE_TYPE (arg_quo), arg_quo,
13272 build_int_cst (NULL, integer_quo));
13273 TREE_SIDE_EFFECTS (result_quo) = 1;
13274 /* Combine the quo assignment with the rem. */
13275 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13276 result_quo, result_rem));
13284 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13285 resulting value as a tree with type TYPE. The mpfr precision is
13286 set to the precision of TYPE. We assume that this mpfr function
13287 returns zero if the result could be calculated exactly within the
13288 requested precision. In addition, the integer pointer represented
13289 by ARG_SG will be dereferenced and set to the appropriate signgam
13293 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13295 tree result = NULL_TREE;
13299 /* To proceed, MPFR must exactly represent the target floating point
13300 format, which only happens when the target base equals two. Also
13301 verify ARG is a constant and that ARG_SG is an int pointer. */
13302 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13303 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13304 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13305 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13307 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13309 /* In addition to NaN and Inf, the argument cannot be zero or a
13310 negative integer. */
13311 if (real_isfinite (ra)
13312 && ra->cl != rvc_zero
13313 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13315 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13316 const int prec = fmt->p;
13317 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13322 mpfr_init2 (m, prec);
13323 mpfr_from_real (m, ra, GMP_RNDN);
13324 mpfr_clear_flags ();
13325 inexact = mpfr_lgamma (m, &sg, m, rnd);
13326 result_lg = do_mpfr_ckconv (m, type, inexact);
13332 /* Dereference the arg_sg pointer argument. */
13333 arg_sg = build_fold_indirect_ref (arg_sg);
13334 /* Assign the signgam value into *arg_sg. */
13335 result_sg = fold_build2 (MODIFY_EXPR,
13336 TREE_TYPE (arg_sg), arg_sg,
13337 build_int_cst (NULL, sg));
13338 TREE_SIDE_EFFECTS (result_sg) = 1;
13339 /* Combine the signgam assignment with the lgamma result. */
13340 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13341 result_sg, result_lg));
13349 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13350 function FUNC on it and return the resulting value as a tree with
13351 type TYPE. The mpfr precision is set to the precision of TYPE. We
13352 assume that function FUNC returns zero if the result could be
13353 calculated exactly within the requested precision. */
13356 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13358 tree result = NULL_TREE;
13362 /* To proceed, MPFR must exactly represent the target floating point
13363 format, which only happens when the target base equals two. */
13364 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13365 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13366 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13368 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13369 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13371 if (real_isfinite (re) && real_isfinite (im))
13373 const struct real_format *const fmt =
13374 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13375 const int prec = fmt->p;
13376 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13377 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13381 mpc_init2 (m, prec);
13382 mpfr_from_real (mpc_realref(m), re, rnd);
13383 mpfr_from_real (mpc_imagref(m), im, rnd);
13384 mpfr_clear_flags ();
13385 inexact = func (m, m, crnd);
13386 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13394 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13395 mpc function FUNC on it and return the resulting value as a tree
13396 with type TYPE. The mpfr precision is set to the precision of
13397 TYPE. We assume that function FUNC returns zero if the result
13398 could be calculated exactly within the requested precision. If
13399 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13400 in the arguments and/or results. */
13403 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13404 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13406 tree result = NULL_TREE;
13411 /* To proceed, MPFR must exactly represent the target floating point
13412 format, which only happens when the target base equals two. */
13413 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13414 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13415 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13416 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13417 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13419 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13420 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13421 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13422 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13425 || (real_isfinite (re0) && real_isfinite (im0)
13426 && real_isfinite (re1) && real_isfinite (im1)))
13428 const struct real_format *const fmt =
13429 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13430 const int prec = fmt->p;
13431 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13432 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13436 mpc_init2 (m0, prec);
13437 mpc_init2 (m1, prec);
13438 mpfr_from_real (mpc_realref(m0), re0, rnd);
13439 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13440 mpfr_from_real (mpc_realref(m1), re1, rnd);
13441 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13442 mpfr_clear_flags ();
13443 inexact = func (m0, m0, m1, crnd);
13444 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13454 The functions below provide an alternate interface for folding
13455 builtin function calls presented as GIMPLE_CALL statements rather
13456 than as CALL_EXPRs. The folded result is still expressed as a
13457 tree. There is too much code duplication in the handling of
13458 varargs functions, and a more intrusive re-factoring would permit
13459 better sharing of code between the tree and statement-based
13460 versions of these functions. */
13462 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13463 along with N new arguments specified as the "..." parameters. SKIP
13464 is the number of arguments in STMT to be omitted. This function is used
13465 to do varargs-to-varargs transformations. */
13468 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13470 int oldnargs = gimple_call_num_args (stmt);
13471 int nargs = oldnargs - skip + n;
13472 tree fntype = TREE_TYPE (fndecl);
13473 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13477 location_t loc = gimple_location (stmt);
13479 buffer = XALLOCAVEC (tree, nargs);
13481 for (i = 0; i < n; i++)
13482 buffer[i] = va_arg (ap, tree);
13484 for (j = skip; j < oldnargs; j++, i++)
13485 buffer[i] = gimple_call_arg (stmt, j);
13487 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13490 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13491 a normal call should be emitted rather than expanding the function
13492 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13495 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13497 tree dest, size, len, fn, fmt, flag;
13498 const char *fmt_str;
13499 int nargs = gimple_call_num_args (stmt);
13501 /* Verify the required arguments in the original call. */
13504 dest = gimple_call_arg (stmt, 0);
13505 if (!validate_arg (dest, POINTER_TYPE))
13507 flag = gimple_call_arg (stmt, 1);
13508 if (!validate_arg (flag, INTEGER_TYPE))
13510 size = gimple_call_arg (stmt, 2);
13511 if (!validate_arg (size, INTEGER_TYPE))
13513 fmt = gimple_call_arg (stmt, 3);
13514 if (!validate_arg (fmt, POINTER_TYPE))
13517 if (! host_integerp (size, 1))
13522 if (!init_target_chars ())
13525 /* Check whether the format is a literal string constant. */
13526 fmt_str = c_getstr (fmt);
13527 if (fmt_str != NULL)
13529 /* If the format doesn't contain % args or %%, we know the size. */
13530 if (strchr (fmt_str, target_percent) == 0)
13532 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13533 len = build_int_cstu (size_type_node, strlen (fmt_str));
13535 /* If the format is "%s" and first ... argument is a string literal,
13536 we know the size too. */
13537 else if (fcode == BUILT_IN_SPRINTF_CHK
13538 && strcmp (fmt_str, target_percent_s) == 0)
13544 arg = gimple_call_arg (stmt, 4);
13545 if (validate_arg (arg, POINTER_TYPE))
13547 len = c_strlen (arg, 1);
13548 if (! len || ! host_integerp (len, 1))
13555 if (! integer_all_onesp (size))
13557 if (! len || ! tree_int_cst_lt (len, size))
13561 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13562 or if format doesn't contain % chars or is "%s". */
13563 if (! integer_zerop (flag))
13565 if (fmt_str == NULL)
13567 if (strchr (fmt_str, target_percent) != NULL
13568 && strcmp (fmt_str, target_percent_s))
13572 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13573 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13574 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13578 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13581 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13582 a normal call should be emitted rather than expanding the function
13583 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13584 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13585 passed as second argument. */
13588 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13589 enum built_in_function fcode)
13591 tree dest, size, len, fn, fmt, flag;
13592 const char *fmt_str;
13594 /* Verify the required arguments in the original call. */
13595 if (gimple_call_num_args (stmt) < 5)
13597 dest = gimple_call_arg (stmt, 0);
13598 if (!validate_arg (dest, POINTER_TYPE))
13600 len = gimple_call_arg (stmt, 1);
13601 if (!validate_arg (len, INTEGER_TYPE))
13603 flag = gimple_call_arg (stmt, 2);
13604 if (!validate_arg (flag, INTEGER_TYPE))
13606 size = gimple_call_arg (stmt, 3);
13607 if (!validate_arg (size, INTEGER_TYPE))
13609 fmt = gimple_call_arg (stmt, 4);
13610 if (!validate_arg (fmt, POINTER_TYPE))
13613 if (! host_integerp (size, 1))
13616 if (! integer_all_onesp (size))
13618 if (! host_integerp (len, 1))
13620 /* If LEN is not constant, try MAXLEN too.
13621 For MAXLEN only allow optimizing into non-_ocs function
13622 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13623 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13629 if (tree_int_cst_lt (size, maxlen))
13633 if (!init_target_chars ())
13636 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13637 or if format doesn't contain % chars or is "%s". */
13638 if (! integer_zerop (flag))
13640 fmt_str = c_getstr (fmt);
13641 if (fmt_str == NULL)
13643 if (strchr (fmt_str, target_percent) != NULL
13644 && strcmp (fmt_str, target_percent_s))
13648 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13650 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13651 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13655 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13658 /* Builtins with folding operations that operate on "..." arguments
13659 need special handling; we need to store the arguments in a convenient
13660 data structure before attempting any folding. Fortunately there are
13661 only a few builtins that fall into this category. FNDECL is the
13662 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13663 result of the function call is ignored. */
13666 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13667 bool ignore ATTRIBUTE_UNUSED)
13669 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13670 tree ret = NULL_TREE;
13674 case BUILT_IN_SPRINTF_CHK:
13675 case BUILT_IN_VSPRINTF_CHK:
13676 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13679 case BUILT_IN_SNPRINTF_CHK:
13680 case BUILT_IN_VSNPRINTF_CHK:
13681 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13688 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13689 TREE_NO_WARNING (ret) = 1;
13695 /* A wrapper function for builtin folding that prevents warnings for
13696 "statement without effect" and the like, caused by removing the
13697 call node earlier than the warning is generated. */
13700 fold_call_stmt (gimple stmt, bool ignore)
13702 tree ret = NULL_TREE;
13703 tree fndecl = gimple_call_fndecl (stmt);
13704 location_t loc = gimple_location (stmt);
13706 && TREE_CODE (fndecl) == FUNCTION_DECL
13707 && DECL_BUILT_IN (fndecl)
13708 && !gimple_call_va_arg_pack_p (stmt))
13710 int nargs = gimple_call_num_args (stmt);
13712 if (avoid_folding_inline_builtin (fndecl))
13714 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13716 return targetm.fold_builtin (fndecl, nargs,
13718 ? gimple_call_arg_ptr (stmt, 0)
13719 : &error_mark_node), ignore);
13723 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13725 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13727 for (i = 0; i < nargs; i++)
13728 args[i] = gimple_call_arg (stmt, i);
13729 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13732 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13735 /* Propagate location information from original call to
13736 expansion of builtin. Otherwise things like
13737 maybe_emit_chk_warning, that operate on the expansion
13738 of a builtin, will use the wrong location information. */
13739 if (gimple_has_location (stmt))
13741 tree realret = ret;
13742 if (TREE_CODE (ret) == NOP_EXPR)
13743 realret = TREE_OPERAND (ret, 0);
13744 if (CAN_HAVE_LOCATION_P (realret)
13745 && !EXPR_HAS_LOCATION (realret))
13746 SET_EXPR_LOCATION (realret, loc);
13756 /* Look up the function in built_in_decls that corresponds to DECL
13757 and set ASMSPEC as its user assembler name. DECL must be a
13758 function decl that declares a builtin. */
13761 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13764 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13765 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13768 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13769 set_user_assembler_name (builtin, asmspec);
13770 switch (DECL_FUNCTION_CODE (decl))
13772 case BUILT_IN_MEMCPY:
13773 init_block_move_fn (asmspec);
13774 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13776 case BUILT_IN_MEMSET:
13777 init_block_clear_fn (asmspec);
13778 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13780 case BUILT_IN_MEMMOVE:
13781 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13783 case BUILT_IN_MEMCMP:
13784 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13786 case BUILT_IN_ABORT:
13787 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13790 if (INT_TYPE_SIZE < BITS_PER_WORD)
13792 set_user_assembler_libfunc ("ffs", asmspec);
13793 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13794 MODE_INT, 0), "ffs");
13802 /* Return true if DECL is a builtin that expands to a constant or similarly
13805 is_simple_builtin (tree decl)
13807 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13808 switch (DECL_FUNCTION_CODE (decl))
13810 /* Builtins that expand to constants. */
13811 case BUILT_IN_CONSTANT_P:
13812 case BUILT_IN_EXPECT:
13813 case BUILT_IN_OBJECT_SIZE:
13814 case BUILT_IN_UNREACHABLE:
13815 /* Simple register moves or loads from stack. */
13816 case BUILT_IN_RETURN_ADDRESS:
13817 case BUILT_IN_EXTRACT_RETURN_ADDR:
13818 case BUILT_IN_FROB_RETURN_ADDR:
13819 case BUILT_IN_RETURN:
13820 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13821 case BUILT_IN_FRAME_ADDRESS:
13822 case BUILT_IN_VA_END:
13823 case BUILT_IN_STACK_SAVE:
13824 case BUILT_IN_STACK_RESTORE:
13825 /* Exception state returns or moves registers around. */
13826 case BUILT_IN_EH_FILTER:
13827 case BUILT_IN_EH_POINTER:
13828 case BUILT_IN_EH_COPY_VALUES:
13838 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13839 most probably expanded inline into reasonably simple code. This is a
13840 superset of is_simple_builtin. */
13842 is_inexpensive_builtin (tree decl)
13846 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13848 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13849 switch (DECL_FUNCTION_CODE (decl))
13852 case BUILT_IN_ALLOCA:
13853 case BUILT_IN_BSWAP32:
13854 case BUILT_IN_BSWAP64:
13856 case BUILT_IN_CLZIMAX:
13857 case BUILT_IN_CLZL:
13858 case BUILT_IN_CLZLL:
13860 case BUILT_IN_CTZIMAX:
13861 case BUILT_IN_CTZL:
13862 case BUILT_IN_CTZLL:
13864 case BUILT_IN_FFSIMAX:
13865 case BUILT_IN_FFSL:
13866 case BUILT_IN_FFSLL:
13867 case BUILT_IN_IMAXABS:
13868 case BUILT_IN_FINITE:
13869 case BUILT_IN_FINITEF:
13870 case BUILT_IN_FINITEL:
13871 case BUILT_IN_FINITED32:
13872 case BUILT_IN_FINITED64:
13873 case BUILT_IN_FINITED128:
13874 case BUILT_IN_FPCLASSIFY:
13875 case BUILT_IN_ISFINITE:
13876 case BUILT_IN_ISINF_SIGN:
13877 case BUILT_IN_ISINF:
13878 case BUILT_IN_ISINFF:
13879 case BUILT_IN_ISINFL:
13880 case BUILT_IN_ISINFD32:
13881 case BUILT_IN_ISINFD64:
13882 case BUILT_IN_ISINFD128:
13883 case BUILT_IN_ISNAN:
13884 case BUILT_IN_ISNANF:
13885 case BUILT_IN_ISNANL:
13886 case BUILT_IN_ISNAND32:
13887 case BUILT_IN_ISNAND64:
13888 case BUILT_IN_ISNAND128:
13889 case BUILT_IN_ISNORMAL:
13890 case BUILT_IN_ISGREATER:
13891 case BUILT_IN_ISGREATEREQUAL:
13892 case BUILT_IN_ISLESS:
13893 case BUILT_IN_ISLESSEQUAL:
13894 case BUILT_IN_ISLESSGREATER:
13895 case BUILT_IN_ISUNORDERED:
13896 case BUILT_IN_VA_ARG_PACK:
13897 case BUILT_IN_VA_ARG_PACK_LEN:
13898 case BUILT_IN_VA_COPY:
13899 case BUILT_IN_TRAP:
13900 case BUILT_IN_SAVEREGS:
13901 case BUILT_IN_POPCOUNTL:
13902 case BUILT_IN_POPCOUNTLL:
13903 case BUILT_IN_POPCOUNTIMAX:
13904 case BUILT_IN_POPCOUNT:
13905 case BUILT_IN_PARITYL:
13906 case BUILT_IN_PARITYLL:
13907 case BUILT_IN_PARITYIMAX:
13908 case BUILT_IN_PARITY:
13909 case BUILT_IN_LABS:
13910 case BUILT_IN_LLABS:
13911 case BUILT_IN_PREFETCH:
13915 return is_simple_builtin (decl);